diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
index 4b7c05ec2eb..0d116a32712 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
@@ -46,9 +46,9 @@ class ClusterFormationTasks {
     /**
      * Adds dependent tasks to the given task to start and stop a cluster with the given configuration.
      *
-     * Returns an object that will resolve at execution time of the given task to a uri for the cluster.
+     * Returns a NodeInfo object for the first node in the cluster.
      */
-    static Object setup(Project project, Task task, ClusterConfiguration config) {
+    static NodeInfo setup(Project project, Task task, ClusterConfiguration config) {
         if (task.getEnabled() == false) {
             // no need to add cluster formation tasks if the task won't run!
             return
@@ -66,7 +66,7 @@ class ClusterFormationTasks {
         task.dependsOn(wait)
 
         // delay the resolution of the uri by wrapping in a closure, so it is not used until read for tests
-        return "${-> nodes[0].transportUri()}"
+        return nodes[0]
     }
 
     /** Adds a dependency on the given distribution */
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index 5656be57b8f..3bfe9d61018 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -20,7 +20,6 @@ package org.elasticsearch.gradle.test
 
 import com.carrotsearch.gradle.junit4.RandomizedTestingTask
 import org.elasticsearch.gradle.BuildPlugin
-import org.gradle.api.GradleException
 import org.gradle.api.Task
 import org.gradle.api.internal.tasks.options.Option
 import org.gradle.api.plugins.JavaBasePlugin
@@ -61,8 +60,12 @@ public class RestIntegTestTask extends RandomizedTestingTask {
         // this must run after all projects have been configured, so we know any project
         // references can be accessed as a fully configured
         project.gradle.projectsEvaluated {
-            Object clusterUri = ClusterFormationTasks.setup(project, this, clusterConfig)
-            systemProperty('tests.cluster', clusterUri)
+            NodeInfo node = ClusterFormationTasks.setup(project, this, clusterConfig)
+            systemProperty('tests.rest.cluster', "${-> node.httpUri()}")
+            // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
+            // that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
+            // both as separate sysprops
+            systemProperty('tests.cluster', "${-> node.transportUri()}")
         }
 
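For orientation, here is a minimal sketch of the consumer side of the two system properties wired up above. The property names (`tests.rest.cluster`, `tests.cluster`) come straight from the diff; the helper class itself is hypothetical:

```java
// Hypothetical helper showing how the sysprops set by RestIntegTestTask could
// be read inside a test JVM. Only the property names come from the change above.
public final class ClusterAddresses {
    private ClusterAddresses() {}

    /** HTTP address used by REST tests, e.g. "127.0.0.1:9200". */
    public static String restCluster() {
        return require("tests.rest.cluster");
    }

    /** Transport address used by transport-client based "client" QA tests. */
    public static String transportCluster() {
        return require("tests.cluster");
    }

    private static String require(String key) {
        String value = System.getProperty(key);
        if (value == null) {
            throw new IllegalStateException("system property [" + key + "] is not set");
        }
        return value;
    }
}
```

Because the GString passed to `systemProperty` is lazily evaluated (`"${-> node.httpUri()}"`), the address is only resolved once the cluster is actually running.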
diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java
index 5f1a181fabb..67f256c6bd1 100644
--- a/core/src/main/java/org/elasticsearch/action/ActionModule.java
+++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java
@@ -200,7 +200,7 @@ public class ActionModule extends AbstractModule {
     private final Map<String, ActionEntry> actions = new HashMap<>();
     private final List<Class<? extends ActionFilter>> actionFilters = new ArrayList<>();
 
-    static class ActionEntry<Request extends ActionRequest, Response extends ActionResponse> {
+    static class ActionEntry<Request extends ActionRequest<Request>, Response extends ActionResponse> {
         public final GenericAction<Request, Response> action;
         public final Class<? extends TransportAction<Request, Response>> transportAction;
         public final Class<?>[] supportTransportActions;
@@ -229,7 +229,7 @@ public class ActionModule extends AbstractModule {
      * @param <Request> The request type.
      * @param <Response> The response type.
      */
-    public <Request extends ActionRequest, Response extends ActionResponse> void registerAction(GenericAction<Request, Response> action, Class<? extends TransportAction<Request, Response>> transportAction, Class<?>... supportTransportActions) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse> void registerAction(GenericAction<Request, Response> action, Class<? extends TransportAction<Request, Response>> transportAction, Class<?>... supportTransportActions) {
         actions.put(action.name(), new ActionEntry<>(action, transportAction, supportTransportActions));
     }
 
diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java
index f5020a46b37..24e9cd05aad 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java
@@ -22,14 +22,9 @@ package org.elasticsearch.action.admin.cluster.settings;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlocks;
 import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
 import static org.elasticsearch.cluster.ClusterState.builder;
 
 /**
@@ -57,11 +52,11 @@ final class SettingsUpdater {
         boolean changed = false;
         Settings.Builder transientSettings = Settings.settingsBuilder();
         transientSettings.put(currentState.metaData().transientSettings());
-        changed |= apply(transientToApply, transientSettings, transientUpdates, "transient");
+        changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient");
 
         Settings.Builder persistentSettings = Settings.settingsBuilder();
         persistentSettings.put(currentState.metaData().persistentSettings());
-        changed |= apply(persistentToApply, persistentSettings, persistentUpdates, "persistent");
+        changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent");
 
         if (!changed) {
             return currentState;
@@ -86,42 +81,5 @@ final class SettingsUpdater {
         return build;
     }
 
-    private boolean apply(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
-        boolean changed = false;
-        final Set<String> toRemove = new HashSet<>();
-        Settings.Builder settingsBuilder = Settings.settingsBuilder();
-        for (Map.Entry<String, String> entry : toApply.getAsMap().entrySet()) {
-            if (entry.getValue() == null) {
-                toRemove.add(entry.getKey());
-            } else if (clusterSettings.isLoggerSetting(entry.getKey()) || clusterSettings.hasDynamicSetting(entry.getKey())) {
-                settingsBuilder.put(entry.getKey(), entry.getValue());
-                updates.put(entry.getKey(), entry.getValue());
-                changed = true;
-            } else {
-                throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable");
-            }
-        }
-        changed |= applyDeletes(toRemove, target);
-        target.put(settingsBuilder.build());
-        return changed;
-    }
-
-    private final boolean applyDeletes(Set<String> deletes, Settings.Builder builder) {
-        boolean changed = false;
-        for (String entry : deletes) {
-            Set<String> keysToRemove = new HashSet<>();
-            Set<String> keySet = builder.internalMap().keySet();
-            for (String key : keySet) {
-                if (Regex.simpleMatch(entry, key)) {
-                    keysToRemove.add(key);
-                }
-            }
-            for (String key : keysToRemove) {
-                builder.remove(key);
-                changed = true;
-            }
-        }
-        return changed;
-    }
 }
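The two helpers removed above now live behind `ClusterSettings#updateDynamicSettings`, so both call sites share one implementation. As a reminder of the semantics the deleted `applyDeletes` carried — a key set to `null` acts as a (possibly wildcarded) delete — here is a standalone sketch in plain Java; it mirrors, but is not, the ES implementation, and `simpleMatch` is a simplified stand-in for `org.elasticsearch.common.regex.Regex.simpleMatch`:

```java
import java.util.Iterator;
import java.util.Map;

// Standalone sketch of the wildcard-delete semantics of the removed
// applyDeletes(): a pattern like "index.routing.*" removes every matching key.
// ClusterSettings.updateDynamicSettings now owns this logic inside ES.
public final class WildcardDeletes {

    public static boolean applyDeletes(Iterable<String> patterns, Map<String, String> settings) {
        boolean changed = false;
        for (String pattern : patterns) {
            Iterator<Map.Entry<String, String>> it = settings.entrySet().iterator();
            while (it.hasNext()) {
                if (simpleMatch(pattern, it.next().getKey())) {
                    it.remove();
                    changed = true;
                }
            }
        }
        return changed;
    }

    // Simplified stand-in for Regex.simpleMatch: a trailing '*' matches any suffix.
    private static boolean simpleMatch(String pattern, String key) {
        return pattern.endsWith("*")
                ? key.startsWith(pattern.substring(0, pattern.length() - 1))
                : pattern.equals(key);
    }
}
```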
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
index a76b714b31d..aaaf11e4534 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java
@@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.refresh;
 import org.elasticsearch.action.ReplicationResponse;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.replication.ReplicationRequest;
+import org.elasticsearch.action.support.replication.BasicReplicationRequest;
 import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
@@ -37,7 +37,7 @@ import java.util.List;
 /**
  * Refresh action.
  */
-public class TransportRefreshAction extends TransportBroadcastReplicationAction<RefreshRequest, RefreshResponse, ReplicationRequest, ReplicationResponse> {
+public class TransportRefreshAction extends TransportBroadcastReplicationAction<RefreshRequest, RefreshResponse, BasicReplicationRequest, ReplicationResponse> {
 
     @Inject
     public TransportRefreshAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
@@ -53,8 +53,8 @@ public class TransportRefreshAction extends TransportBroadcastReplicationAction<
     }
 
     @Override
-    protected ReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) {
-        return new ReplicationRequest(request, shardId);
+    protected BasicReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) {
+        return new BasicReplicationRequest(request, shardId);
     }
 
     @Override
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
index c78977fb362..7c9979e7374 100644
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java
@@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.refresh;
 
 import org.elasticsearch.action.ReplicationResponse;
 import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.replication.ReplicationRequest;
+import org.elasticsearch.action.support.replication.BasicReplicationRequest;
 import org.elasticsearch.action.support.replication.TransportReplicationAction;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
@@ -41,7 +41,7 @@ import org.elasticsearch.transport.TransportService;
 /**
  *
  */
-public class TransportShardRefreshAction extends TransportReplicationAction<ReplicationRequest, ReplicationRequest, ReplicationResponse> {
+public class TransportShardRefreshAction extends TransportReplicationAction<BasicReplicationRequest, BasicReplicationRequest, ReplicationResponse> {
 
     public static final String NAME = RefreshAction.NAME + "[s]";
 
@@ -51,7 +51,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction<
 
     @Override
-    protected Tuple<ReplicationResponse, ReplicationRequest> shardOperationOnPrimary(MetaData metaData, ReplicationRequest shardRequest) throws Throwable {
+    protected Tuple<ReplicationResponse, BasicReplicationRequest> shardOperationOnPrimary(MetaData metaData, BasicReplicationRequest shardRequest) throws Throwable {
         IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id());
         indexShard.refresh("api");
         logger.trace("{} refresh request executed on primary", indexShard.shardId());
@@ -68,7 +68,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction<
 
     @Override
-    protected void shardOperationOnReplica(ReplicationRequest request) {
+    protected void shardOperationOnReplica(BasicReplicationRequest request) {
diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
--- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java
 public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<PutIndexTemplateRequest, PutIndexTemplateResponse> {
 
     private final MetaDataIndexTemplateService indexTemplateService;
+    private final IndexScopedSettings indexScopedSettings;
 
     @Inject
     public TransportPutIndexTemplateAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                            ThreadPool threadPool, MetaDataIndexTemplateService indexTemplateService,
-                                           ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
+                                           ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings) {
         super(settings, PutIndexTemplateAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutIndexTemplateRequest::new);
         this.indexTemplateService = indexTemplateService;
+        this.indexScopedSettings = indexScopedSettings;
     }
 
     @Override
@@ -69,11 +73,13 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<PutIndexTemplateRequest, PutIndexTemplateResponse>
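Both refresh actions now use `BasicReplicationRequest` as their per-shard request type. To make the shape of the broadcast-replication pattern concrete, here is a toy model of the fan-out step that `newShardRequest` plugs into — illustrative names only, not the ES classes:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

// Toy model of the broadcast-replication fan-out: one broadcast-level request
// becomes a concrete per-shard request per primary shard, via a factory in the
// spirit of newShardRequest(...). Illustrative only.
final class BroadcastFanOut<Request, ShardRequest> {
    private final Function<Request, ShardRequest> newShardRequest;

    BroadcastFanOut(Function<Request, ShardRequest> newShardRequest) {
        this.newShardRequest = newShardRequest;
    }

    List<ShardRequest> fanOut(Request request, int primaryShards) {
        List<ShardRequest> shardRequests = new ArrayList<>(primaryShards);
        for (int shard = 0; shard < primaryShards; shard++) {
            shardRequests.add(newShardRequest.apply(request));
        }
        return shardRequests;
    }
}
```

Each per-shard request is then executed through the replication machinery like a write, which is why the shard request type must be a `ReplicationRequest` subtype.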

 scheduledFuture;
 
     private final AtomicLong executionIdGen = new AtomicLong();
 
@@ -250,24 +250,24 @@ public class BulkProcessor implements Closeable {
      * (for example, if no id is provided, one will be generated, or usage of the create flag).
      */
     public BulkProcessor add(IndexRequest request) {
-        return add((ActionRequest) request);
+        return add((ActionRequest<?>) request);
     }
 
     /**
      * Adds an {@link DeleteRequest} to the list of actions to execute.
      */
     public BulkProcessor add(DeleteRequest request) {
-        return add((ActionRequest) request);
+        return add((ActionRequest<?>) request);
     }
 
     /**
      * Adds either a delete or an index request.
      */
-    public BulkProcessor add(ActionRequest request) {
+    public BulkProcessor add(ActionRequest<?> request) {
         return add(request, null);
     }
 
-    public BulkProcessor add(ActionRequest request, @Nullable Object payload) {
+    public BulkProcessor add(ActionRequest<?> request, @Nullable Object payload) {
         internalAdd(request, payload);
         return this;
     }
@@ -282,7 +282,7 @@ public class BulkProcessor implements Closeable {
         }
     }
 
-    private synchronized void internalAdd(ActionRequest request, @Nullable Object payload) {
+    private synchronized void internalAdd(ActionRequest<?> request, @Nullable Object payload) {
         ensureOpen();
         bulkRequest.add(request, payload);
         executeIfNeeded();
diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
index 02e0ea40d65..00260644892 100644
--- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
@@ -56,7 +56,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
 
     private static final int REQUEST_OVERHEAD = 50;
 
-    final List<ActionRequest> requests = new ArrayList<>();
+    final List<ActionRequest<?>> requests = new ArrayList<>();
     List<Object> payloads = null;
 
     protected TimeValue timeout = BulkShardRequest.DEFAULT_TIMEOUT;
@@ -72,21 +72,21 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
      * Creates a bulk request caused by some other request, which is provided as an
      * argument so that its headers and context can be copied to the new request
      */
-    public BulkRequest(ActionRequest request) {
+    public BulkRequest(ActionRequest<?> request) {
         super(request);
     }
 
     /**
      * Adds a list of requests to be executed. Either index or delete requests.
      */
-    public BulkRequest add(ActionRequest... requests) {
-        for (ActionRequest request : requests) {
+    public BulkRequest add(ActionRequest<?>... requests) {
+        for (ActionRequest<?> request : requests) {
             add(request, null);
         }
         return this;
     }
 
-    public BulkRequest add(ActionRequest request) {
+    public BulkRequest add(ActionRequest<?> request) {
         return add(request, null);
     }
 
@@ -96,7 +96,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
      * @param payload Optional payload
      * @return the current bulk request
      */
-    public BulkRequest add(ActionRequest request, @Nullable Object payload) {
+    public BulkRequest add(ActionRequest<?> request, @Nullable Object payload) {
         if (request instanceof IndexRequest) {
             add((IndexRequest) request, payload);
         } else if (request instanceof DeleteRequest) {
@@ -112,8 +112,8 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
     /**
      * Adds a list of requests to be executed. Either index or delete requests.
      */
-    public BulkRequest add(Iterable<ActionRequest> requests) {
-        for (ActionRequest request : requests) {
+    public BulkRequest add(Iterable<ActionRequest<?>> requests) {
+        for (ActionRequest<?> request : requests) {
             add(request);
         }
         return this;
@@ -196,15 +196,14 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
     /**
      * The list of requests in this bulk request.
      */
-    public List<ActionRequest> requests() {
+    public List<ActionRequest<?>> requests() {
         return this.requests;
     }
 
     @Override
-    @SuppressWarnings("unchecked")
     public List<? extends IndicesRequest> subRequests() {
         List<IndicesRequest> indicesRequests = new ArrayList<>();
-        for (ActionRequest request : requests) {
+        for (ActionRequest<?> request : requests) {
             assert request instanceof IndicesRequest;
             indicesRequests.add((IndicesRequest) request);
         }
@@ -486,7 +485,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
         if (requests.isEmpty()) {
             validationException = addValidationError("no requests added", validationException);
         }
-        for (ActionRequest request : requests) {
+        for (ActionRequest<?> request : requests) {
             // We first check if refresh has been set
             if ((request instanceof DeleteRequest && ((DeleteRequest)request).refresh()) ||
                     (request instanceof UpdateRequest && ((UpdateRequest)request).refresh()) ||
@@ -535,7 +534,7 @@ public class BulkRequest extends ActionRequest<BulkRequest> implements Composite
         super.writeTo(out);
         out.writeByte(consistencyLevel.id());
         out.writeVInt(requests.size());
-        for (ActionRequest request : requests) {
+        for (ActionRequest<?> request : requests) {
             if (request instanceof IndexRequest) {
                 out.writeByte((byte) 0);
             } else if (request instanceof DeleteRequest) {
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java
index 7140af93ed0..c2ae538085b 100644
--- a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java
+++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java
@@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.IndicesRequest;
-import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.action.support.single.shard.SingleShardRequest;
@@ -109,7 +108,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
     }
 
-    public static class Request extends SingleShardRequest implements IndicesRequest {
+    public static class Request extends SingleShardRequest<Request> implements IndicesRequest {
 
         private int shardId;
         private String preference;
@@ -237,7 +236,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
                     shardResponse.readFrom(in);
                     items.add(new Item(slot, shardResponse));
                 } else {
-                    items.add(new Item(slot, (Throwable)in.readThrowable()));
+                    items.add(new Item(slot, in.readThrowable()));
                 }
             }
         }
diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java
index b80cf9ddbd4..6c08eec323f 100644
--- a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java
+++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java
@@ -40,13 +40,13 @@ public interface ActionFilter {
      * Enables filtering the execution of an action on the request side, either by sending a response through the
      * {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain}
      */
-    void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain);
+    void apply(Task task, String action, ActionRequest<?> request, ActionListener<?> listener, ActionFilterChain chain);
 
     /**
      * Enables filtering the execution of an action on the response side, either by sending a response through the
      * {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain}
      */
-    void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain);
+    void apply(String action, ActionResponse response, ActionListener<?> listener, ActionFilterChain chain);
 
     /**
      * A simple base class for injectable action filters that spares the implementation from handling the
@@ -60,7 +60,7 @@ public interface ActionFilter {
         }
 
         @Override
-        public final void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) {
+        public final void apply(Task task, String action, ActionRequest<?> request, ActionListener<?> listener, ActionFilterChain chain) {
             if (apply(action, request, listener)) {
                 chain.proceed(task, action, request, listener);
             }
@@ -70,10 +70,10 @@ public interface ActionFilter {
          * Applies this filter and returns {@code true} if the execution chain should proceed, or {@code false}
          * if it should be aborted since the filter already handled the request and called the given listener.
          */
-        protected abstract boolean apply(String action, ActionRequest request, ActionListener listener);
+        protected abstract boolean apply(String action, ActionRequest<?> request, ActionListener<?> listener);
 
         @Override
-        public final void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) {
+        public final void apply(String action, ActionResponse response, ActionListener<?> listener, ActionFilterChain chain) {
             if (apply(action, response, listener)) {
                 chain.proceed(action, response, listener);
             }
@@ -83,6 +83,6 @@ public interface ActionFilter {
          * Applies this filter and returns {@code true} if the execution chain should proceed, or {@code false}
          * if it should be aborted since the filter already handled the response by calling the given listener.
          */
-        protected abstract boolean apply(String action, ActionResponse response, ActionListener listener);
+        protected abstract boolean apply(String action, ActionResponse response, ActionListener<?> listener);
     }
 }
diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java
index 93c96b24be3..c8f04852452 100644
--- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java
+++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java
@@ -44,7 +44,7 @@ public final class AutoCreateIndex {
     @Inject
     public AutoCreateIndex(Settings settings, IndexNameExpressionResolver resolver) {
         this.resolver = resolver;
-        dynamicMappingDisabled = !settings.getAsBoolean(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, MapperService.INDEX_MAPPER_DYNAMIC_DEFAULT);
+        dynamicMappingDisabled = !MapperService.INDEX_MAPPER_DYNAMIC_SETTING.get(settings);
         String value = settings.get("action.auto_create_index");
         if (value == null || Booleans.isExplicitTrue(value)) {
             needToCheck = true;
diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
index bd9556f0500..a439117fef6 100644
--- a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java
@@ -34,8 +34,8 @@ import java.util.function.Supplier;
 /**
  * A TransportAction that self registers a handler into the transport service
 */
-public abstract class HandledTransportAction<Request extends ActionRequest, Response extends ActionResponse> extends TransportAction<Request, Response>{
-
+public abstract class HandledTransportAction<Request extends ActionRequest<Request>, Response extends ActionResponse>
+        extends TransportAction<Request, Response> {
     protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> request) {
         super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager());
         transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, new TransportHandler());
diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
index 3e0454550ba..eb62903bf34 100644
--- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java
@@ -40,7 +40,7 @@ import static org.elasticsearch.action.support.PlainActionFuture.newFuture;
 /**
  *
 */
-public abstract class TransportAction<Request extends ActionRequest, Response extends ActionResponse> extends AbstractComponent {
+public abstract class TransportAction<Request extends ActionRequest<Request>, Response extends ActionResponse> extends AbstractComponent {
 
     protected final ThreadPool threadPool;
     protected final String actionName;
@@ -66,7 +66,7 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Response extends ActionResponse> extends AbstractComponent {
 
-    public final Task execute(Request request, ActionListener listener) {
+    public final Task execute(Request request, ActionListener<Response> listener) {
         Task task = taskManager.register("transport", actionName, request);
         if (task == null) {
             execute(null, request, listener);
@@ -85,6 +85,7 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Response extends ActionResponse> extends AbstractComponent {
     private final void execute(Task task, Request request, ActionListener<Response> listener) {
@@ -114,7 +115,7 @@ public abstract class TransportAction<Request extends ActionRequest<Request>, Response extends ActionResponse> extends AbstractComponent {
 
     protected abstract void doExecute(Request request, ActionListener<Response> listener);
 
-    private static class RequestFilterChain implements ActionFilterChain {
+    private static class RequestFilterChain<Request extends ActionRequest<Request>, Response extends ActionResponse> implements ActionFilterChain {
 
         private final TransportAction<Request, Response> action;
         private final AtomicInteger index = new AtomicInteger();
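The common thread in this and the following hunks is the new F-bounded type parameter, `Request extends ActionRequest<Request>`. A minimal, standalone illustration of what the self-type buys (toy classes, not the ES ones): a fluent setter declared once on the base class can return the concrete subclass, so chained calls keep their precise type:

```java
// Toy illustration of the F-bounded ("self-referential") generics pattern
// these hunks introduce on ActionRequest and its subclasses.
abstract class BaseRequest<R extends BaseRequest<R>> {
    private long timeoutMillis;

    @SuppressWarnings("unchecked")
    final R timeout(long millis) { // returns R, the concrete subclass
        this.timeoutMillis = millis;
        return (R) this;
    }

    final long timeout() {
        return timeoutMillis;
    }
}

final class RefreshLikeRequest extends BaseRequest<RefreshLikeRequest> {
    RefreshLikeRequest index(String name) { // subclass-specific fluent setter
        return this;
    }
}

class SelfTypeDemo {
    public static void main(String[] args) {
        // timeout(...) already returns RefreshLikeRequest, so the chain compiles:
        RefreshLikeRequest request = new RefreshLikeRequest().timeout(500).index("idx");
        System.out.println(request.timeout());
    }
}
```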
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
index 3159c3ab2b6..be851cfa7e2 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java
@@ -49,7 +49,7 @@ import java.util.function.Supplier;
 /**
  *
 */
-public abstract class TransportBroadcastAction<Request extends BroadcastRequest, Response extends BroadcastResponse, ShardRequest extends BroadcastShardRequest, ShardResponse extends BroadcastShardResponse>
+public abstract class TransportBroadcastAction<Request extends BroadcastRequest<Request>, Response extends BroadcastResponse, ShardRequest extends BroadcastShardRequest, ShardResponse extends BroadcastShardResponse>
         extends HandledTransportAction<Request, Response> {
 
     protected final ClusterService clusterService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
index e8f4a0d83cd..613de1aa923 100644
--- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java
@@ -74,7 +74,7 @@
  * @param <Response> the response to the client request
  * @param <ShardOperationResult> per-shard operation results
 */
-public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRequest, Response extends BroadcastResponse, ShardOperationResult extends Streamable> extends HandledTransportAction<Request, Response> {
+public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRequest<Request>, Response extends BroadcastResponse, ShardOperationResult extends Streamable> extends HandledTransportAction<Request, Response> {
@@ -447,10 +447,12 @@ public abstract class TransportBroadcastByNodeAction
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
--- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java
-public abstract class TransportMasterNodeAction<Request extends MasterNodeRequest, Response extends ActionResponse> extends HandledTransportAction<Request, Response> {
+public abstract class TransportMasterNodeAction<Request extends MasterNodeRequest<Request>, Response extends ActionResponse> extends HandledTransportAction<Request, Response> {
 
     protected final TransportService transportService;
     protected final ClusterService clusterService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java
index d0f64cbb9be..681b9f0a648 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java
@@ -33,7 +33,8 @@ import java.util.function.Supplier;
 * A base class for read operations that needs to be performed on the master node.
 * Can also be executed on the local node if needed.
 */
-public abstract class TransportMasterNodeReadAction<Request extends MasterNodeReadRequest, Response extends ActionResponse> extends TransportMasterNodeAction<Request, Response> {
+public abstract class TransportMasterNodeReadAction<Request extends MasterNodeReadRequest<Request>, Response extends ActionResponse>
+        extends TransportMasterNodeAction<Request, Response> {
 
     public static final String FORCE_LOCAL_SETTING = "action.master.force_local";
diff --git a/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java b/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java
index ec9c3eb46c3..7e42036c1d1 100644
--- a/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java
@@ -33,7 +33,8 @@ import java.util.function.Supplier;
 /**
 */
-public abstract class TransportClusterInfoAction<Request extends ClusterInfoRequest, Response extends ActionResponse> extends TransportMasterNodeReadAction<Request, Response> {
+public abstract class TransportClusterInfoAction<Request extends ClusterInfoRequest<Request>, Response extends ActionResponse>
+        extends TransportMasterNodeReadAction<Request, Response> {
 
     public TransportClusterInfoAction(Settings settings, String actionName, TransportService transportService,
                                       ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters,
diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
index b83081b86bd..f90d194287b 100644
--- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java
@@ -50,7 +50,7 @@ import java.util.function.Supplier;
 /**
  *
 */
-public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest, NodesResponse extends BaseNodesResponse, NodeRequest extends BaseNodeRequest, NodeResponse extends BaseNodeResponse> extends HandledTransportAction<NodesRequest, NodesResponse> {
+public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest<NodesRequest>, NodesResponse extends BaseNodesResponse, NodeRequest extends BaseNodeRequest, NodeResponse extends BaseNodeResponse> extends HandledTransportAction<NodesRequest, NodesResponse> {
 
     protected final ClusterName clusterName;
     protected final ClusterService clusterService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java
new file mode 100644
index 00000000000..3778275d400
--- /dev/null
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.support.replication;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.index.shard.ShardId;
+
+/**
+ * A replication request that has no more information than ReplicationRequest.
+ * Unfortunately ReplicationRequest can't be declared as a type parameter
+ * because it has a self referential type parameter of its own. So use this
+ * instead.
+ */
+public class BasicReplicationRequest extends ReplicationRequest<BasicReplicationRequest> {
+    public BasicReplicationRequest() {
+
+    }
+
+    /**
+     * Creates a new request that inherits headers and context from the request
+     * provided as argument.
+     */
+    public BasicReplicationRequest(ActionRequest<?> request) {
+        super(request);
+    }
+
+    /**
+     * Creates a new request with resolved shard id
+     */
+    public BasicReplicationRequest(ActionRequest<?> request, ShardId shardId) {
+        super(request, shardId);
+    }
+
+    /**
+     * Copy constructor that creates a new request that is a copy of the one
+     * provided as an argument.
+     */
+    protected BasicReplicationRequest(BasicReplicationRequest request) {
+        super(request);
+    }
+
+}
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java
index 9b956918306..a6c9b8f65a3 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java
@@ -38,7 +38,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
 /**
  *
 */
-public class ReplicationRequest<Request extends ReplicationRequest<Request>> extends ActionRequest<Request> implements IndicesRequest {
+public abstract class ReplicationRequest<Request extends ReplicationRequest<Request>> extends ActionRequest<Request> implements IndicesRequest {
 
     public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES);
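`ReplicationRequest` is now abstract, so every concrete per-shard request has to name itself as the type argument; `BasicReplicationRequest` above is the trivial such subclass for actions (like refresh) that carry no extra state. A hedged sketch of what a non-trivial subclass looks like under this scheme — the class name and field are invented for illustration:

```java
package org.elasticsearch.action.support.replication;

// Hypothetical subclass illustrating the self-typed scheme: the class passes
// itself as the type argument, exactly as BasicReplicationRequest does above.
// SimpleRefreshRequest and its "force" field are invented for illustration.
public class SimpleRefreshRequest extends ReplicationRequest<SimpleRefreshRequest> {

    private boolean force;

    public SimpleRefreshRequest force(boolean force) {
        this.force = force;
        return this;
    }

    public boolean force() {
        return force;
    }
}
```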
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
index 33a9d349e80..ab88d73d3b0 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java
@@ -20,6 +20,7 @@ package org.elasticsearch.action.support.replication;
 
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
+
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ReplicationResponse;
@@ -52,7 +53,8 @@ import java.util.function.Supplier;
 * Base class for requests that should be executed on all shards of an index or several indices.
 * This action sends shard requests to all primary shards of the indices and they are then replicated like write requests
 */
-public abstract class TransportBroadcastReplicationAction<Request extends BroadcastRequest, Response extends BroadcastResponse, ShardRequest extends ReplicationRequest, ShardResponse extends ReplicationResponse> extends HandledTransportAction<Request, Response> {
+public abstract class TransportBroadcastReplicationAction<Request extends BroadcastRequest<Request>, Response extends BroadcastResponse, ShardRequest extends ReplicationRequest<ShardRequest>, ShardResponse extends ReplicationResponse>
+        extends HandledTransportAction<Request, Response> {
 
     private final TransportReplicationAction<ShardRequest, ShardRequest, ShardResponse> replicatedBroadcastShardAction;
     private final ClusterService clusterService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
index 0014404057f..b2972201808 100644
--- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java
@@ -64,7 +64,6 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.BaseTransportResponseHandler;
 import org.elasticsearch.transport.ConnectTransportException;
 import org.elasticsearch.transport.EmptyTransportResponseHandler;
-import org.elasticsearch.transport.ReceiveTimeoutTransportException;
 import org.elasticsearch.transport.TransportChannel;
 import org.elasticsearch.transport.TransportChannelResponseHandler;
 import org.elasticsearch.transport.TransportException;
@@ -76,6 +75,7 @@ import org.elasticsearch.transport.TransportService;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -90,9 +90,7 @@ import java.util.function.Supplier;
 * primary node to validate request before primary operation followed by sampling state again for resolving
 * nodes with replica copies to perform replication.
 */
-public abstract class TransportReplicationAction<Request extends ReplicationRequest, ReplicaRequest extends ReplicationRequest, Response extends ReplicationResponse> extends TransportAction<Request, Response> {
-
-    public static final String SHARD_FAILURE_TIMEOUT = "action.support.replication.shard.failure_timeout";
+public abstract class TransportReplicationAction<Request extends ReplicationRequest<Request>, ReplicaRequest extends ReplicationRequest<ReplicaRequest>, Response extends ReplicationResponse> extends TransportAction<Request, Response> {
 
     protected final TransportService transportService;
     protected final ClusterService clusterService;
@@ -101,7 +99,6 @@ public abstract class TransportReplicationAction
diff --git a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
--- a/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java
-public abstract class TransportInstanceSingleOperationAction<Request extends InstanceShardOperationRequest, Response extends ActionResponse> extends HandledTransportAction<Request, Response> {
-
+public abstract class TransportInstanceSingleOperationAction<Request extends InstanceShardOperationRequest<Request>, Response extends ActionResponse>
+        extends HandledTransportAction<Request, Response> {
 
     protected final ClusterService clusterService;
     protected final TransportService transportService;
diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
index 47eebc9cfcd..1a77a411a94 100644
--- a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
+++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java
@@ -54,7 +54,7 @@ import static org.elasticsearch.action.support.TransportActions.isShardNotAvaila
 * the read operation can be performed on other shard copies. Concrete implementations can provide their own list
 * of candidate shards to try the read operation on.
 */
-public abstract class TransportSingleShardAction<Request extends SingleShardRequest, Response extends ActionResponse> extends TransportAction<Request, Response> {
+public abstract class TransportSingleShardAction<Request extends SingleShardRequest<Request>, Response extends ActionResponse> extends TransportAction<Request, Response> {
 
     protected final ClusterService clusterService;
diff --git a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java
index 4cf5a5a961d..d9ddc56d48a 100644
--- a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java
+++ b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java
@@ -40,7 +40,8 @@ public interface ElasticsearchClient {
     * @param <RequestBuilder> The request builder type.
     * @return A future allowing to get back the response.
     */
-    <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(final Action<Request, Response, RequestBuilder> action, final Request request);
+    <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
+            final Action<Request, Response, RequestBuilder> action, final Request request);
 
     /**
      * Executes a generic action, denoted by an {@link Action}.
@@ -52,7 +53,8 @@ public interface ElasticsearchClient {
     * @param <Response> The response type.
     * @param <RequestBuilder> The request builder type.
     */
-    <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener);
+    <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
+            final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener);
 
     /**
      * Prepares a request builder to execute, specified by {@link Action}.
@@ -63,7 +65,8 @@ public interface ElasticsearchClient {
     * @param <RequestBuilder> The request builder.
     * @return The request builder, that can, at a later stage, execute the request.
     */
-    <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(final Action<Request, Response, RequestBuilder> action);
+    <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
+            final Action<Request, Response, RequestBuilder> action);
 
     /**
      * Returns the threadpool used to execute requests on this client
diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java
index 06d81f0c9d5..77abceef17a 100644
--- a/core/src/main/java/org/elasticsearch/client/FilterClient.java
+++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java
@@ -52,7 +52,8 @@ public abstract class FilterClient extends AbstractClient {
     }
 
     @Override
-    protected <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+    protected <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(
+            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
         in().execute(action, request, listener);
     }
 
diff --git a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java
index 65adfad64dc..4f64f63f8d7 100644
--- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java
+++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java
@@ -56,7 +56,8 @@ public class NodeClient extends AbstractClient {
 
     @SuppressWarnings("unchecked")
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(
+            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
         TransportAction<Request, Response> transportAction = actions.get(action);
         if (transportAction == null) {
             throw new IllegalStateException("failed to find action [" + action + "] to execute");
diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
index e5a465442bb..e5e1bea6cb5 100644
--- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
+++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java
@@ -363,12 +363,14 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }
 
     @Override
-    public final <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(final Action<Request, Response, RequestBuilder> action) {
+    public final <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
+            final Action<Request, Response, RequestBuilder> action) {
         return action.newRequestBuilder(this);
     }
 
     @Override
-    public final <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
+    public final <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
+            Action<Request, Response, RequestBuilder> action, Request request) {
         PlainActionFuture<Response> actionFuture = PlainActionFuture.newFuture();
         execute(action, request, actionFuture);
         return actionFuture;
@@ -378,13 +380,14 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     * This is the single execution point of *all* clients.
     */
     @Override
-    public final <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+    public final <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
+            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
         headers.applyTo(request);
         listener = threadedWrapper.wrap(listener);
         doExecute(action, request, listener);
     }
 
-    protected abstract <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener);
+    protected abstract <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(final Action<Request, Response, RequestBuilder> action, final Request request, ActionListener<Response> listener);
 
     @Override
     public ActionFuture<IndexResponse> index(final IndexRequest request) {
@@ -821,17 +824,20 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }
 
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
+            Action<Request, Response, RequestBuilder> action, Request request) {
         return client.execute(action, request);
     }
 
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
+            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
         client.execute(action, request, listener);
     }
 
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
+            Action<Request, Response, RequestBuilder> action) {
         return client.prepareExecute(action);
     }
 
@@ -1178,17 +1184,20 @@ public abstract class AbstractClient extends AbstractComponent implements Client
     }
 
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(Action<Request, Response, RequestBuilder> action, Request request) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response> execute(
+            Action<Request, Response, RequestBuilder> action, Request request) {
         return client.execute(action, request);
     }
 
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void execute(
+            Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
         client.execute(action, request, listener);
     }
 
     @Override
-    public <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(Action<Request, Response, RequestBuilder> action) {
+    public <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder prepareExecute(
+            Action<Request, Response, RequestBuilder> action) {
         return client.prepareExecute(action);
     }
 
diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
index 3b8be668f43..3d68e642c46 100644
--- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
+++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java
@@ -19,6 +19,10 @@
 package org.elasticsearch.client.transport;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
 import org.elasticsearch.Version;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionListener;
@@ -36,6 +40,7 @@ import org.elasticsearch.common.component.LifecycleComponent;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.inject.ModulesBuilder;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.network.NetworkModule;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
@@ -54,10 +59,6 @@ import org.elasticsearch.threadpool.ThreadPoolModule;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.netty.NettyTransport;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 
 /**
@@ -81,7 +82,7 @@ public class TransportClient extends AbstractClient {
     */
     public static class Builder {
 
-        private Settings settings = Settings.EMPTY;
+        private Settings providedSettings = Settings.EMPTY;
         private List<Class<? extends Plugin>> pluginClasses = new ArrayList<>();
 
         /**
@@ -95,7 +96,7 @@ public class TransportClient extends AbstractClient {
         * The settings to configure the transport client with.
         */
         public Builder settings(Settings settings) {
-            this.settings = settings;
+            this.providedSettings = settings;
             return this;
         }
 
@@ -107,27 +108,29 @@ public class TransportClient extends AbstractClient {
             return this;
         }
 
+        private PluginsService newPluginService(final Settings settings) {
+            final Settings.Builder settingsBuilder = settingsBuilder()
+                    .put(NettyTransport.PING_SCHEDULE, "5s") // enable by default the transport schedule ping interval
+                    .put( InternalSettingsPreparer.prepareSettings(settings))
+                    .put("network.server", false)
+                    .put("node.client", true)
+                    .put(CLIENT_TYPE_SETTING, CLIENT_TYPE);
+            return new PluginsService(settingsBuilder.build(), null, null, pluginClasses);
+        };
+
         /**
         * Builds a new instance of the transport client.
         */
         public TransportClient build() {
-            Settings settings = InternalSettingsPreparer.prepareSettings(this.settings);
-            settings = settingsBuilder()
-                    .put(NettyTransport.PING_SCHEDULE, "5s") // enable by default the transport schedule ping interval
-                    .put(settings)
-                    .put("network.server", false)
-                    .put("node.client", true)
-                    .put(CLIENT_TYPE_SETTING, CLIENT_TYPE)
-                    .build();
-
-            PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses);
-            this.settings = pluginsService.updatedSettings();
+            final PluginsService pluginsService = newPluginService(providedSettings);
+            final Settings settings = pluginsService.updatedSettings();
 
             Version version = Version.CURRENT;
 
             final ThreadPool threadPool = new ThreadPool(settings);
             final NetworkService networkService = new NetworkService(settings);
             final SettingsFilter settingsFilter = new SettingsFilter(settings);
+            NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
             boolean success = false;
             try {
                 ModulesBuilder modules = new ModulesBuilder();
@@ -137,18 +140,18 @@ public class TransportClient extends AbstractClient {
                     modules.add(pluginModule);
                 }
                 modules.add(new PluginsModule(pluginsService));
-                modules.add(new SettingsModule(this.settings, settingsFilter ));
-                modules.add(new NetworkModule(networkService, this.settings, true));
-                modules.add(new ClusterNameModule(this.settings));
+                modules.add(new SettingsModule(settings, settingsFilter ));
+                modules.add(new NetworkModule(networkService, settings, true, namedWriteableRegistry));
+                modules.add(new ClusterNameModule(settings));
                 modules.add(new ThreadPoolModule(threadPool));
-                modules.add(new SearchModule() {
+                modules.add(new SearchModule(settings, namedWriteableRegistry) {
                     @Override
                     protected void configure() {
                         // noop
                     }
                 });
                 modules.add(new ActionModule(true));
-                modules.add(new CircuitBreakerModule(this.settings));
+                modules.add(new CircuitBreakerModule(settings));
 
                 pluginsService.processModules(modules);
@@ -276,7 +279,7 @@ public class TransportClient extends AbstractClient {
     }
 
     @Override
-    protected <Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
+    protected <Request extends ActionRequest<Request>, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void doExecute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
         proxy.execute(action, request, listener);
     }
 }
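For orientation, the refactored `Builder` is consumed the same way as before; typical usage in this era of the client API looks like the following (the plugin class is a placeholder for any `org.elasticsearch.plugins.Plugin` implementation):

```java
// Typical construction path through the refactored Builder. MyPlugin is a
// placeholder; addPlugin registers it in the pluginClasses list shown above.
Settings settings = Settings.settingsBuilder()
        .put("cluster.name", "my-cluster")
        .build();
TransportClient client = TransportClient.builder()
        .settings(settings)
        .addPlugin(MyPlugin.class)
        .build();
```

The point of the `providedSettings` rename is that the caller's settings are no longer mutated on the `Builder` itself; `newPluginService` derives the effective settings locally and the built client uses `pluginsService.updatedSettings()` throughout.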
diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 670d7c09211..626b020c56c 100644
--- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -23,7 +23,6 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
 import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
 import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
 import org.elasticsearch.cluster.action.shard.ShardStateAction;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.IndexTemplateFilter;
 import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
@@ -36,7 +35,6 @@ import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService;
 import org.elasticsearch.cluster.node.DiscoveryNodeService;
 import org.elasticsearch.cluster.routing.OperationRouting;
 import org.elasticsearch.cluster.routing.RoutingService;
-import org.elasticsearch.cluster.routing.UnassignedInfo;
 import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator;
 import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
@@ -56,26 +54,12 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio
 import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.cluster.service.InternalClusterService;
-import org.elasticsearch.cluster.settings.DynamicSettings;
-import org.elasticsearch.cluster.settings.Validator;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.ExtensionPoint;
 import org.elasticsearch.gateway.GatewayAllocator;
-import org.elasticsearch.gateway.PrimaryShardAllocator;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.IndexingSlowLog;
-import org.elasticsearch.index.search.stats.SearchSlowLog;
-import org.elasticsearch.index.settings.IndexDynamicSettings;
-import org.elasticsearch.index.MergePolicyConfig;
-import org.elasticsearch.index.MergeSchedulerConfig;
-import org.elasticsearch.index.store.IndexStore;
-import org.elasticsearch.indices.IndicesWarmer;
-import org.elasticsearch.indices.cache.request.IndicesRequestCache;
-import org.elasticsearch.indices.ttl.IndicesTTLService;
-import org.elasticsearch.search.internal.DefaultSearchContext;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -106,7 +90,6 @@ public class ClusterModule extends AbstractModule {
         SnapshotInProgressAllocationDecider.class));
 
     private final Settings settings;
-    private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder();
     private final ExtensionPoint.SelectedType<ShardsAllocator> shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class);
     private final ExtensionPoint.ClassSet<AllocationDecider> allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class);
     private final ExtensionPoint.ClassSet<IndexTemplateFilter> indexTemplateFilters = new ExtensionPoint.ClassSet<>("index_template_filter", IndexTemplateFilter.class);
@@ -116,9 +99,6 @@ public class ClusterModule extends AbstractModule {
 
     public ClusterModule(Settings settings) {
         this.settings = settings;
-
-        registerBuiltinIndexSettings();
-
         for (Class<? extends AllocationDecider> decider : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) {
             registerAllocationDecider(decider);
         }
@@ -126,68 +106,6 @@ public class ClusterModule extends AbstractModule {
         registerShardsAllocator(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class);
     }
 
-    private void registerBuiltinIndexSettings() {
-        registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE);
-        registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_TYPE, Validator.EMPTY);
-        registerIndexDynamicSetting(MergeSchedulerConfig.MAX_THREAD_COUNT, Validator.NON_NEGATIVE_INTEGER);
-        registerIndexDynamicSetting(MergeSchedulerConfig.MAX_MERGE_COUNT, Validator.EMPTY);
-        registerIndexDynamicSetting(MergeSchedulerConfig.AUTO_THROTTLE, Validator.EMPTY);
-        registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_REQUIRE_GROUP + "*", Validator.EMPTY);
-        registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "*", Validator.EMPTY);
-        registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY);
-        registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY);
-        registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_READ_ONLY, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_BLOCKS_READ, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_BLOCKS_WRITE, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_BLOCKS_METADATA, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexMetaData.SETTING_PRIORITY, Validator.NON_NEGATIVE_INTEGER);
-        registerIndexDynamicSetting(IndicesTTLService.INDEX_TTL_DISABLE_PURGE, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexSettings.INDEX_REFRESH_INTERVAL, Validator.TIME);
-        registerIndexDynamicSetting(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexSettings.INDEX_GC_DELETES_SETTING, Validator.TIME);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, Validator.TIME);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, Validator.TIME);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, Validator.TIME);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, Validator.TIME);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG, Validator.EMPTY);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, Validator.TIME);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT, Validator.EMPTY);
-        registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL, Validator.EMPTY);
-        registerIndexDynamicSetting(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, Validator.INTEGER);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, Validator.DOUBLE);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT, Validator.BYTES_SIZE);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, Validator.INTEGER_GTE_2);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, Validator.INTEGER_GTE_2);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, Validator.BYTES_SIZE);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, Validator.DOUBLE_GTE_2);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE);
-        registerIndexDynamicSetting(MergePolicyConfig.INDEX_COMPOUND_FORMAT, Validator.EMPTY);
-        registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE);
-        registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY);
-        registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY);
-        registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN);
-        registerIndexDynamicSetting(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, Validator.TIME);
-        registerIndexDynamicSetting(DefaultSearchContext.MAX_RESULT_WINDOW, Validator.POSITIVE_INTEGER);
-    }
-
-    public void registerIndexDynamicSetting(String setting, Validator validator) {
-        indexDynamicSettings.addSetting(setting, validator);
-    }
-
     public void registerAllocationDecider(Class<? extends AllocationDecider> allocationDecider) {
         allocationDeciders.registerExtension(allocationDecider);
     }
@@ -202,8 +120,6 @@ public class ClusterModule extends AbstractModule {
 
     @Override
     protected void configure() {
-        bind(DynamicSettings.class).annotatedWith(IndexDynamicSettings.class).toInstance(indexDynamicSettings.build());
-
         // bind ShardsAllocator
         String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR);
         if (shardsAllocatorType.equals(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR)) {
org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.NodeDisconnectedException; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; -import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; @@ -60,55 +65,95 @@ import java.util.Locale; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; public class ShardStateAction extends AbstractComponent { + public static final String SHARD_STARTED_ACTION_NAME = "internal:cluster/shard/started"; public static final String SHARD_FAILED_ACTION_NAME = "internal:cluster/shard/failure"; private final TransportService transportService; + private final ClusterService clusterService; @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { super(settings); this.transportService = transportService; + this.clusterService = clusterService; transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateTaskExecutor(allocationService, logger), logger)); transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger)); } - public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { - shardFailed(clusterState, shardRouting, indexUUID, message, failure, null, listener); - } - - public void resendShardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { - logger.trace("{} re-sending failed shard [{}], index UUID [{}], reason [{}]", shardRouting.shardId(), failure, shardRouting, indexUUID, message); - shardFailed(clusterState, shardRouting, indexUUID, message, failure, listener); - } - - public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) { - DiscoveryNode masterNode = clusterState.nodes().masterNode(); + private void sendShardAction(final String actionName, final ClusterStateObserver observer, final ShardRoutingEntry shardRoutingEntry, final Listener listener) { + DiscoveryNode masterNode = observer.observedState().nodes().masterNode(); if (masterNode == null) { - logger.warn("{} no master known to fail shard [{}]", shardRouting.shardId(), shardRouting); - listener.onShardFailedNoMaster(); - return; - } - ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); - TransportRequestOptions options = TransportRequestOptions.EMPTY; - if (timeout != null) { - options = TransportRequestOptions.builder().withTimeout(timeout).build(); - } - transportService.sendRequest(masterNode, - 
SHARD_FAILED_ACTION_NAME, shardRoutingEntry, options, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleResponse(TransportResponse.Empty response) { - listener.onSuccess(); - } + logger.warn("{} no master known for action [{}] for shard [{}]", shardRoutingEntry.getShardRouting().shardId(), actionName, shardRoutingEntry.getShardRouting()); + waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); + } else { + logger.debug("{} sending [{}] to [{}] for shard [{}]", shardRoutingEntry.getShardRouting().getId(), actionName, masterNode.getId(), shardRoutingEntry); + transportService.sendRequest(masterNode, + actionName, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleResponse(TransportResponse.Empty response) { + listener.onSuccess(); + } - @Override - public void handleException(TransportException exp) { - logger.warn("{} unexpected failure while sending request to [{}] to fail shard [{}]", exp, shardRoutingEntry.shardRouting.shardId(), masterNode, shardRoutingEntry); - listener.onShardFailedFailure(masterNode, exp); + @Override + public void handleException(TransportException exp) { + if (isMasterChannelException(exp)) { + waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); + } else { + logger.warn("{} unexpected failure while sending request [{}] to [{}] for shard [{}]", exp, shardRoutingEntry.getShardRouting().shardId(), actionName, masterNode, shardRoutingEntry); + listener.onFailure(exp.getCause()); + } + } + }); + } + } + + private static Class[] MASTER_CHANNEL_EXCEPTIONS = new Class[]{ + NotMasterException.class, + ConnectTransportException.class, + Discovery.FailedToCommitClusterStateException.class + }; + + private static boolean isMasterChannelException(TransportException exp) { + return ExceptionsHelper.unwrap(exp, MASTER_CHANNEL_EXCEPTIONS) != null; + } + + public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { + ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); + sendShardAction(SHARD_FAILED_ACTION_NAME, observer, shardRoutingEntry, listener); + } + + public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { + logger.trace("{} re-sending failed shard [{}], index UUID [{}], reason [{}]", failure, shardRouting.shardId(), shardRouting, indexUUID, message); + shardFailed(shardRouting, indexUUID, message, failure, listener); + } + + // visible for testing + protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardRoutingEntry shardRoutingEntry, Listener listener) { + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + if (logger.isTraceEnabled()) { + logger.trace("{} new cluster state [{}] after waiting for master election to fail shard [{}]", shardRoutingEntry.getShardRouting().shardId(), state.prettyPrint(), shardRoutingEntry); } - }); + sendShardAction(actionName, observer, shardRoutingEntry, listener); + } + + @Override + public void onClusterServiceClose() { + logger.warn("{} node closed while executing action [{}] for shard [{}]", shardRoutingEntry.failure,
shardRoutingEntry.getShardRouting().getId(), actionName, shardRoutingEntry.getShardRouting()); + listener.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + // we wait indefinitely for a new master + assert false; + } + }, MasterNodeChangePredicate.INSTANCE); } private static class ShardFailedTransportHandler implements TransportRequestHandler { @@ -208,21 +253,10 @@ public class ShardStateAction extends AbstractComponent { } } - public void shardStarted(final ClusterState clusterState, final ShardRouting shardRouting, String indexUUID, final String reason) { - DiscoveryNode masterNode = clusterState.nodes().masterNode(); - if (masterNode == null) { - logger.warn("{} no master known to start shard [{}]", shardRouting.shardId(), shardRouting); - return; - } - ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); - logger.debug("sending start shard [{}]", shardRoutingEntry); - transportService.sendRequest(masterNode, - SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleException(TransportException exp) { - logger.warn("{} failure sending start shard [{}] to [{}]", exp, shardRouting.shardId(), masterNode, shardRouting); - } - }); + public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String message, Listener listener) { + ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, null); + sendShardAction(SHARD_STARTED_ACTION_NAME, observer, shardRoutingEntry, listener); } private static class ShardStartedTransportHandler implements TransportRequestHandler { @@ -334,10 +368,23 @@ public class ShardStateAction extends AbstractComponent { default void onSuccess() { } - default void onShardFailedNoMaster() { - } - - default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) { + /** + * Notification for non-channel exceptions that are not handled + * by {@link ShardStateAction}. + * + * The exceptions that are handled by {@link ShardStateAction} + * are: + * - {@link NotMasterException} + * - {@link NodeDisconnectedException} + * - {@link Discovery.FailedToCommitClusterStateException} + * + * Any other exception is communicated to the requester via + * this notification. 
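To make the contract above concrete, a caller-side sketch (not part of the patch; the method name, message text, and the reactions inside the callbacks are illustrative assumptions):

    import org.elasticsearch.cluster.action.shard.ShardStateAction;
    import org.elasticsearch.cluster.routing.ShardRouting;

    class ShardFailureReporterSketch {
        // Hypothetical caller of the new API: master channel problems (no master,
        // connect failures, failed cluster state commits) are retried internally
        // by ShardStateAction; only terminal errors reach onFailure.
        static void reportFailedShard(ShardStateAction shardStateAction, ShardRouting shardRouting,
                                      String indexUUID, Throwable cause) {
            shardStateAction.shardFailed(shardRouting, indexUUID, "engine failure", cause,
                new ShardStateAction.Listener() {
                    @Override
                    public void onSuccess() {
                        // the master applied the shard-failed cluster state task
                    }

                    @Override
                    public void onFailure(Throwable t) {
                        // a non-channel error from the master; the caller decides how to react
                    }
                });
        }
    }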
+ * + * @param t the unexpected cause of the failure on the master + */ + default void onFailure(final Throwable t) { + } } + + } diff --git a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index 1bab607ee85..0006c7da8c9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -306,16 +306,16 @@ public class ClusterBlocks extends AbstractDiffable { if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { addIndexBlock(indexMetaData.getIndex(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_READ_ONLY, false)) { + if (IndexMetaData.INDEX_READ_ONLY_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_ONLY_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_READ, false)) { + if (IndexMetaData.INDEX_BLOCKS_READ_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_WRITE, false)) { + if (IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_METADATA, false)) { + if (IndexMetaData.INDEX_BLOCKS_METADATA_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); } return this; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java new file mode 100644 index 00000000000..d9b288bb897 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.settings.Setting; + +/** + * This class acts as a functional wrapper around the index.auto_expand_replicas setting. + * This setting, or rather its value, is expanded into a min and max value which requires special handling + * based on the number of data nodes in the cluster. This class handles all the parsing and streamlines the access to these values.
+ */ +final class AutoExpandReplicas { + // the value we recognize in the "max" position to mean all the nodes + private static final String ALL_NODES_VALUE = "all"; + public static final Setting<AutoExpandReplicas> SETTING = new Setting<>(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "false", (value) -> { + final int min; + final int max; + if (Booleans.parseBoolean(value, true) == false) { + return new AutoExpandReplicas(0, 0, false); + } + final int dash = value.indexOf('-'); + if (-1 == dash) { + throw new IllegalArgumentException("failed to parse [" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] from value: [" + value + "] at index " + dash); + } + final String sMin = value.substring(0, dash); + try { + min = Integer.parseInt(sMin); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("failed to parse [" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] from value: [" + value + "] at index " + dash, e); + } + String sMax = value.substring(dash + 1); + if (sMax.equals(ALL_NODES_VALUE)) { + max = Integer.MAX_VALUE; + } else { + try { + max = Integer.parseInt(sMax); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("failed to parse [" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] from value: [" + value + "] at index " + dash, e); + } + } + return new AutoExpandReplicas(min, max, true); + }, true, Setting.Scope.INDEX); + + private final int minReplicas; + private final int maxReplicas; + private final boolean enabled; + + private AutoExpandReplicas(int minReplicas, int maxReplicas, boolean enabled) { + if (minReplicas > maxReplicas) { + throw new IllegalArgumentException("[" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] minReplicas must be <= maxReplicas but wasn't " + minReplicas + " > " + maxReplicas); + } + this.minReplicas = minReplicas; + this.maxReplicas = maxReplicas; + this.enabled = enabled; + } + + int getMinReplicas() { + return minReplicas; + } + + int getMaxReplicas(int numDataNodes) { + return Math.min(maxReplicas, numDataNodes - 1); + } + + @Override + public String toString() { + return enabled ?
minReplicas + "-" + maxReplicas : "false"; + } + + boolean isEnabled() { + return enabled; + } +} + + diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index c1f10079768..b2772c0b2c1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -29,14 +29,17 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodeFilters; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -58,6 +61,7 @@ import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; @@ -70,10 +74,6 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; */ public class IndexMetaData implements Diffable, FromXContentBuilder, ToXContent { - public static final IndexMetaData PROTO = IndexMetaData.builder("") - .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); - public interface Custom extends Diffable, ToXContent { String type(); @@ -152,14 +152,29 @@ public class IndexMetaData implements Diffable, FromXContentBuild } public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; + public static final Setting INDEX_NUMBER_OF_SHARDS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, Setting.Scope.INDEX); public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; + public static final Setting INDEX_NUMBER_OF_REPLICAS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, true, Setting.Scope.INDEX); public static final String SETTING_SHADOW_REPLICAS = "index.shadow_replicas"; + public static final Setting INDEX_SHADOW_REPLICAS_SETTING = Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, false, Setting.Scope.INDEX); + public static final String SETTING_SHARED_FILESYSTEM = "index.shared_filesystem"; + public static final Setting INDEX_SHARED_FILESYSTEM_SETTING = Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, false, Setting.Scope.INDEX); + public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; + public static final Setting INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; + public static 
final Setting INDEX_READ_ONLY_SETTING = Setting.boolSetting(SETTING_READ_ONLY, false, true, Setting.Scope.INDEX); + public static final String SETTING_BLOCKS_READ = "index.blocks.read"; + public static final Setting INDEX_BLOCKS_READ_SETTING = Setting.boolSetting(SETTING_BLOCKS_READ, false, true, Setting.Scope.INDEX); + public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; + public static final Setting INDEX_BLOCKS_WRITE_SETTING = Setting.boolSetting(SETTING_BLOCKS_WRITE, false, true, Setting.Scope.INDEX); + public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; + public static final Setting INDEX_BLOCKS_METADATA_SETTING = Setting.boolSetting(SETTING_BLOCKS_METADATA, false, true, Setting.Scope.INDEX); + public static final String SETTING_VERSION_CREATED = "index.version.created"; public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string"; public static final String SETTING_VERSION_UPGRADED = "index.version.upgraded"; @@ -167,12 +182,23 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible"; public static final String SETTING_CREATION_DATE = "index.creation_date"; public static final String SETTING_PRIORITY = "index.priority"; + public static final Setting INDEX_PRIORITY_SETTING = Setting.intSetting("index.priority", 1, 0, true, Setting.Scope.INDEX); public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string"; public static final String SETTING_INDEX_UUID = "index.uuid"; public static final String SETTING_DATA_PATH = "index.data_path"; + public static final Setting INDEX_DATA_PATH_SETTING = new Setting<>(SETTING_DATA_PATH, "", Function.identity(), false, Setting.Scope.INDEX); public static final String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; + public static final Setting INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, true, Setting.Scope.INDEX); public static final String INDEX_UUID_NA_VALUE = "_na_"; + public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.require.", true, Setting.Scope.INDEX); + public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.include.", true, Setting.Scope.INDEX); + public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.exclude.", true, Setting.Scope.INDEX); + + public static final IndexMetaData PROTO = IndexMetaData.builder("") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1).numberOfReplicas(0).build(); + public static final String KEY_ACTIVE_ALLOCATIONS = "active_allocations"; private final int numberOfShards; @@ -627,10 +653,6 @@ public class IndexMetaData implements Diffable, FromXContentBuild return this; } - public long creationDate() { - return settings.getAsLong(SETTING_CREATION_DATE, -1l); - } - public Builder settings(Settings.Builder settings) { this.settings = settings.build(); return this; @@ -645,11 +667,6 @@ public class IndexMetaData implements Diffable, FromXContentBuild return mappings.get(type); } - public Builder removeMapping(String mappingType) { - mappings.remove(mappingType); - return this; - } - public Builder putMapping(String type, String source) throws IOException 
{ try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { putMapping(new MappingMetaData(type, parser.mapOrdered())); @@ -692,24 +709,11 @@ public class IndexMetaData implements Diffable, FromXContentBuild return this; } - public Builder removeCustom(String type) { - this.customs.remove(type); - return this; - } - - public Custom getCustom(String type) { - return this.customs.get(type); - } - public Builder putActiveAllocationIds(int shardId, Set allocationIds) { activeAllocationIds.put(shardId, new HashSet(allocationIds)); return this; } - public Set getActiveAllocationIds(int shardId) { - return activeAllocationIds.get(shardId); - } - public long version() { return this.version; } @@ -758,22 +762,21 @@ public class IndexMetaData implements Diffable, FromXContentBuild filledActiveAllocationIds.put(i, Collections.emptySet()); } } - - Map requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap(); + final Map requireMap = INDEX_ROUTING_REQUIRE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters requireFilters; if (requireMap.isEmpty()) { requireFilters = null; } else { requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); } - Map includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap(); + Map includeMap = INDEX_ROUTING_INCLUDE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters includeFilters; if (includeMap.isEmpty()) { includeFilters = null; } else { includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); } - Map excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap(); + Map excludeMap = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters excludeFilters; if (excludeMap.isEmpty()) { excludeFilters = null; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index b2c9e500f66..2344e9af77c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -103,13 +104,14 @@ public class MetaDataCreateIndexService extends AbstractComponent { private final IndexTemplateFilter indexTemplateFilter; private final Environment env; private final NodeServicesProvider nodeServicesProvider; + private final IndexScopedSettings indexScopedSettings; @Inject public MetaDataCreateIndexService(Settings settings, ClusterService clusterService, IndicesService indicesService, AllocationService allocationService, Version version, AliasValidator aliasValidator, - Set indexTemplateFilters, Environment env, NodeServicesProvider nodeServicesProvider) { + Set indexTemplateFilters, Environment env, NodeServicesProvider nodeServicesProvider, IndexScopedSettings indexScopedSettings) { super(settings); this.clusterService = clusterService; this.indicesService = indicesService; @@ -118,6 +120,7 @@ public class 
MetaDataCreateIndexService extends AbstractComponent { this.aliasValidator = aliasValidator; this.env = env; this.nodeServicesProvider = nodeServicesProvider; + this.indexScopedSettings = indexScopedSettings; if (indexTemplateFilters.isEmpty()) { this.indexTemplateFilter = DEFAULT_INDEX_TEMPLATE_FILTER; @@ -174,6 +177,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { public void createIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener listener) { Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder(); updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); + indexScopedSettings.validate(updatedSettingsBuilder); request.settings(updatedSettingsBuilder.build()); clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", @@ -313,7 +317,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // first, add the default mapping if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { try { - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false, request.updateAllTypes()); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); } catch (Exception e) { removalReason = "failed on parsing default mapping on index creation"; throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, MapperService.DEFAULT_MAPPING, e.getMessage()); @@ -325,7 +329,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { } try { // apply the default here, its the first time we parse it - mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true, request.updateAllTypes()); + mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); } catch (Exception e) { removalReason = "failed on parsing mappings on index creation"; throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); @@ -460,16 +464,17 @@ public class MetaDataCreateIndexService extends AbstractComponent { } List getIndexSettingsValidationErrors(Settings settings) { - String customPath = settings.get(IndexMetaData.SETTING_DATA_PATH, null); + String customPath = IndexMetaData.INDEX_DATA_PATH_SETTING.get(settings); List validationErrors = new ArrayList<>(); - if (customPath != null && env.sharedDataFile() == null) { + if (Strings.isEmpty(customPath) == false && env.sharedDataFile() == null) { validationErrors.add("path.shared_data must be set in order to use custom data paths"); - } else if (customPath != null) { + } else if (Strings.isEmpty(customPath) == false) { Path resolvedPath = PathUtils.get(new Path[]{env.sharedDataFile()}, customPath); if (resolvedPath == null) { validationErrors.add("custom path [" + customPath + "] is not a sub-path of path.shared_data [" + env.sharedDataFile() + "]"); } } + //norelease - this can be removed? 
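The //norelease question above looks plausible precisely because of the typed settings introduced in this change: INDEX_NUMBER_OF_SHARDS_SETTING is declared with a minimum of 1 and INDEX_NUMBER_OF_REPLICAS_SETTING with a minimum of 0, so the setting's own parser already rejects the values the manual checks below guard against. A minimal sketch of that behaviour (an illustration, not part of the patch):

    import org.elasticsearch.cluster.metadata.IndexMetaData;
    import org.elasticsearch.common.settings.Settings;

    class ShardCountValidationSketch {
        static int parseNumberOfShards() {
            Settings settings = Settings.settingsBuilder()
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 0) // below the declared minimum of 1
                .build();
            // expected to throw IllegalArgumentException from the setting's own
            // bounds check, which is what would make the explicit <= 0 test redundant
            return IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.get(settings);
        }
    }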
Integer number_of_primaries = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null); Integer number_of_replicas = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null); if (number_of_primaries != null && number_of_primaries <= 0) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 32a66bfb764..1e9f968f7a6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -104,12 +104,9 @@ public class MetaDataIndexAliasesService extends AbstractComponent { // temporarily create the index and add mappings so we can parse the filter try { indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); - if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) { - indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, false); - } for (ObjectCursor cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMetaData = cursor.value; - indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), false, false); + indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false); } } catch (Exception e) { logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.getIndex()); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 07e11c692b2..c6ec2a4376d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -21,7 +21,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; -import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -70,7 +69,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } checkSupportedVersion(indexMetaData); IndexMetaData newMetaData = indexMetaData; - newMetaData = addDefaultUnitsIfNeeded(newMetaData); checkMappingsCompatibility(newMetaData); newMetaData = markAsUpgraded(newMetaData); return newMetaData; @@ -113,103 +111,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { return false; } - /** All known byte-sized settings for an index. */ - public static final Set INDEX_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet( - "index.merge.policy.floor_segment", - "index.merge.policy.max_merged_segment", - "index.merge.policy.max_merge_size", - "index.merge.policy.min_merge_size", - "index.shard.recovery.file_chunk_size", - "index.shard.recovery.translog_size", - "index.store.throttle.max_bytes_per_sec", - "index.translog.flush_threshold_size", - "index.translog.fs.buffer_size", - "index.version_map_size")); - - /** All known time settings for an index. 
*/ - public static final Set INDEX_TIME_SETTINGS = unmodifiableSet(newHashSet( - "index.gateway.wait_for_mapping_update_post_recovery", - "index.shard.wait_for_mapping_update_post_recovery", - "index.gc_deletes", - "index.indexing.slowlog.threshold.index.debug", - "index.indexing.slowlog.threshold.index.info", - "index.indexing.slowlog.threshold.index.trace", - "index.indexing.slowlog.threshold.index.warn", - "index.refresh_interval", - "index.search.slowlog.threshold.fetch.debug", - "index.search.slowlog.threshold.fetch.info", - "index.search.slowlog.threshold.fetch.trace", - "index.search.slowlog.threshold.fetch.warn", - "index.search.slowlog.threshold.query.debug", - "index.search.slowlog.threshold.query.info", - "index.search.slowlog.threshold.query.trace", - "index.search.slowlog.threshold.query.warn", - "index.shadow.wait_for_initial_commit", - "index.store.stats_refresh_interval", - "index.translog.flush_threshold_period", - "index.translog.interval", - "index.translog.sync_interval", - "index.shard.inactive_time", - UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING)); - - /** - * Elasticsearch 2.0 requires units on byte/memory and time settings; this method adds the default unit to any such settings that are - * missing units. - */ - private IndexMetaData addDefaultUnitsIfNeeded(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_2_0_0_beta1)) { - // TODO: can we somehow only do this *once* for a pre-2.0 index? Maybe we could stuff a "fake marker setting" here? Seems hackish... - // Created lazily if we find any settings that are missing units: - Settings settings = indexMetaData.getSettings(); - Settings.Builder newSettings = null; - for(String byteSizeSetting : INDEX_BYTES_SIZE_SETTINGS) { - String value = settings.get(byteSizeSetting); - if (value != null) { - try { - Long.parseLong(value); - } catch (NumberFormatException nfe) { - continue; - } - // It's a naked number that previously would be interpreted as default unit (bytes); now we add it: - logger.warn("byte-sized index setting [{}] with value [{}] is missing units; assuming default units (b) but in future versions this will be a hard error", byteSizeSetting, value); - if (newSettings == null) { - newSettings = Settings.builder(); - newSettings.put(settings); - } - newSettings.put(byteSizeSetting, value + "b"); - } - } - for(String timeSetting : INDEX_TIME_SETTINGS) { - String value = settings.get(timeSetting); - if (value != null) { - try { - Long.parseLong(value); - } catch (NumberFormatException nfe) { - continue; - } - // It's a naked number that previously would be interpreted as default unit (ms); now we add it: - logger.warn("time index setting [{}] with value [{}] is missing units; assuming default units (ms) but in future versions this will be a hard error", timeSetting, value); - if (newSettings == null) { - newSettings = Settings.builder(); - newSettings.put(settings); - } - newSettings.put(timeSetting, value + "ms"); - } - } - if (newSettings != null) { - // At least one setting was changed: - return IndexMetaData.builder(indexMetaData) - .version(indexMetaData.getVersion()) - .settings(newSettings.build()) - .build(); - } - } - - // No changes: - return indexMetaData; - } - - /** * Checks the mappings for compatibility with the current version */ @@ -217,14 +118,14 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { try { // We cannot instantiate real analysis server at this point because the node might not have // been started yet. 
However, we don't really need real analyzers at this stage - so we can fake it - IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList()); + IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) { try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null)) { for (ObjectCursor cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMetaData = cursor.value; - mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), false, false); + mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false); } } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 14f9f500c45..1d13fc2079e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -143,7 +143,7 @@ public class MetaDataMappingService extends AbstractComponent { removeIndex = true; for (ObjectCursor metaData : indexMetaData.getMappings().values()) { // don't apply the default mapping, it has been applied when the mapping was created - indexService.mapperService().merge(metaData.value.type(), metaData.value.source(), false, true); + indexService.mapperService().merge(metaData.value.type(), metaData.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, true); } } @@ -223,7 +223,7 @@ public class MetaDataMappingService extends AbstractComponent { IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); // add mappings for all types, we need them for cross-type validation for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), false, request.updateAllTypes()); + indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); } } } @@ -303,7 +303,7 @@ public class MetaDataMappingService extends AbstractComponent { if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, mappingUpdateSource, true, request.updateAllTypes()); + DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, mappingUpdateSource, MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 35c9c51143f..8e9dbc6b673 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -30,19 +30,20 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import 
org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; +import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.DynamicSettings; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.settings.IndexDynamicSettings; +import org.elasticsearch.index.IndexNotFoundException; import java.util.ArrayList; import java.util.HashMap; @@ -59,25 +60,21 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; */ public class MetaDataUpdateSettingsService extends AbstractComponent implements ClusterStateListener { - // the value we recognize in the "max" position to mean all the nodes - private static final String ALL_NODES_VALUE = "all"; - private final ClusterService clusterService; private final AllocationService allocationService; - private final DynamicSettings dynamicSettings; - private final IndexNameExpressionResolver indexNameExpressionResolver; + private final IndexScopedSettings indexScopedSettings; @Inject - public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, @IndexDynamicSettings DynamicSettings dynamicSettings, IndexNameExpressionResolver indexNameExpressionResolver) { + public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, IndexScopedSettings indexScopedSettings, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings); this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; this.clusterService.add(this); this.allocationService = allocationService; - this.dynamicSettings = dynamicSettings; + this.indexScopedSettings = indexScopedSettings; } @Override @@ -90,69 +87,43 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements final int dataNodeCount = event.state().nodes().dataNodes().size(); Map<Integer, List<String>> nrReplicasChanged = new HashMap<>(); - // we need to do this each time in case it was changed by update settings for (final IndexMetaData indexMetaData : event.state().metaData()) { - String autoExpandReplicas = indexMetaData.getSettings().get(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS); - if (autoExpandReplicas != null && Booleans.parseBoolean(autoExpandReplicas, true)) { // Booleans only work for false values, just as we want it here - try { - final int min; - final int max; + AutoExpandReplicas autoExpandReplicas = IndexMetaData.INDEX_AUTO_EXPAND_REPLICAS_SETTING.get(indexMetaData.getSettings()); + if (autoExpandReplicas.isEnabled()) { + /* + * we have to expand the number of replicas for this index to at least min and at most max nodes here + * so we bump it up if we have to, or reduce it, depending on min/max and the number of data nodes (see the worked example just below).
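The comment continues below; as a worked example of the clamping it describes, consider index.auto_expand_replicas: 0-all on a cluster with three data nodes (a sketch mirroring the patch's own arithmetic):

    class AutoExpandSketch {
        static int autoExpandedReplicas() {
            int dataNodeCount = 3;
            int min = 0;                    // AutoExpandReplicas#getMinReplicas for "0-all"
            int max = dataNodeCount - 1;    // "all" is Integer.MAX_VALUE, capped by getMaxReplicas
            int numberOfReplicas = dataNodeCount - 1;
            if (numberOfReplicas < min) {
                numberOfReplicas = min;
            } else if (numberOfReplicas > max) {
                numberOfReplicas = max;
            }
            return numberOfReplicas;        // 2: one copy on every other data node
        }
    }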
+ * If we change the number of replicas we just let the shard allocator do its thing once we have updated it + * since it goes through the index metadata to figure out if something needs to be done anyway. To do that + * we issue a cluster settings update command below and kick off a reroute. + */ + final int min = autoExpandReplicas.getMinReplicas(); + final int max = autoExpandReplicas.getMaxReplicas(dataNodeCount); + int numberOfReplicas = dataNodeCount - 1; + if (numberOfReplicas < min) { + numberOfReplicas = min; + } else if (numberOfReplicas > max) { + numberOfReplicas = max; + } + // same value, nothing to do there + if (numberOfReplicas == indexMetaData.getNumberOfReplicas()) { + continue; + } - final int dash = autoExpandReplicas.indexOf('-'); - if (-1 == dash) { - logger.warn("failed to set [{}] for index [{}], it should be dash delimited [{}]", - IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.getIndex(), autoExpandReplicas); - continue; - } - final String sMin = autoExpandReplicas.substring(0, dash); - try { - min = Integer.parseInt(sMin); - } catch (NumberFormatException e) { - logger.warn("failed to set [{}] for index [{}], minimum value is not a number [{}]", - e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.getIndex(), sMin); - continue; - } - String sMax = autoExpandReplicas.substring(dash + 1); - if (sMax.equals(ALL_NODES_VALUE)) { - max = dataNodeCount - 1; - } else { - try { - max = Integer.parseInt(sMax); - } catch (NumberFormatException e) { - logger.warn("failed to set [{}] for index [{}], maximum value is neither [{}] nor a number [{}]", - e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.getIndex(), ALL_NODES_VALUE, sMax); - continue; - } + if (numberOfReplicas >= min && numberOfReplicas <= max) { + + if (!nrReplicasChanged.containsKey(numberOfReplicas)) { + nrReplicasChanged.put(numberOfReplicas, new ArrayList<>()); } - int numberOfReplicas = dataNodeCount - 1; - if (numberOfReplicas < min) { - numberOfReplicas = min; - } else if (numberOfReplicas > max) { - numberOfReplicas = max; - } - - // same value, nothing to do there - if (numberOfReplicas == indexMetaData.getNumberOfReplicas()) { - continue; - } - - if (numberOfReplicas >= min && numberOfReplicas <= max) { - - if (!nrReplicasChanged.containsKey(numberOfReplicas)) { - nrReplicasChanged.put(numberOfReplicas, new ArrayList()); - } - - nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex()); - } - } catch (Exception e) { - logger.warn("[{}] failed to parse auto expand replicas", e, indexMetaData.getIndex()); + nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex()); } } } if (nrReplicasChanged.size() > 0) { + // update settings and kick off a reroute (implicit) for them to take effect for (final Integer fNumberOfReplicas : nrReplicasChanged.keySet()) { Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, fNumberOfReplicas).build(); final List<String> indices = nrReplicasChanged.get(fNumberOfReplicas); @@ -182,42 +153,30 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } public void updateSettings(final UpdateSettingsClusterStateUpdateRequest request, final ActionListener listener) { - Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder(); - updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); + final Settings normalizedSettings =
Settings.settingsBuilder().put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX).build(); + Settings.Builder settingsForClosedIndices = Settings.builder(); + Settings.Builder settingsForOpenIndices = Settings.builder(); + Settings.Builder skippedSettingsBuilder = Settings.builder(); + + indexScopedSettings.validate(normalizedSettings); // never allow to change the number of shards - for (String key : updatedSettingsBuilder.internalMap().keySet()) { - if (key.equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) { + for (Map.Entry<String, String> entry : normalizedSettings.getAsMap().entrySet()) { + if (entry.getKey().equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) { listener.onFailure(new IllegalArgumentException("can't change the number of shards for an index")); return; } - } - - final Settings closeSettings = updatedSettingsBuilder.build(); - - final Set removedSettings = new HashSet<>(); - final Set errors = new HashSet<>(); - for (Map.Entry setting : updatedSettingsBuilder.internalMap().entrySet()) { - if (!dynamicSettings.hasDynamicSetting(setting.getKey())) { - removedSettings.add(setting.getKey()); + Setting<?> setting = indexScopedSettings.get(entry.getKey()); + assert setting != null; // we already validated the normalized settings + settingsForClosedIndices.put(entry.getKey(), entry.getValue()); + if (setting.isDynamic()) { + settingsForOpenIndices.put(entry.getKey(), entry.getValue()); } else { - String error = dynamicSettings.validateDynamicSetting(setting.getKey(), setting.getValue(), clusterService.state()); - if (error != null) { - errors.add("[" + setting.getKey() + "] - " + error); - } + skippedSettingsBuilder.put(entry.getKey(), entry.getValue()); } } - - if (!errors.isEmpty()) { - listener.onFailure(new IllegalArgumentException("can't process the settings: " + errors.toString())); - return; - } - - if (!removedSettings.isEmpty()) { - for (String removedSetting : removedSettings) { - updatedSettingsBuilder.remove(removedSetting); - } - } - final Settings openSettings = updatedSettingsBuilder.build(); + final Settings skippedSettings = skippedSettingsBuilder.build(); + final Settings closedSettings = settingsForClosedIndices.build(); + final Settings openSettings = settingsForOpenIndices.build(); clusterService.submitStateUpdateTask("update-settings", new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @@ -245,16 +204,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } } - if (closeIndices.size() > 0 && closeSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) { + if (closeIndices.size() > 0 && closedSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Can't update [%s] on closed indices [%s] - can leave index in an unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS, closeIndices )); } - if (!removedSettings.isEmpty() && !openIndices.isEmpty()) { + if (!skippedSettings.getAsMap().isEmpty() && !openIndices.isEmpty()) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Can't update non dynamic settings [%s] for open indices [%s]", - removedSettings, + skippedSettings.getAsMap().keySet(), openIndices )); } @@ -267,57 +226,37 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - Boolean updatedReadOnly = openSettings.getAsBoolean(IndexMetaData.SETTING_READ_ONLY, null); - if (updatedReadOnly != null) { - for (String index
: actualIndices) { - if (updatedReadOnly) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_READ_ONLY_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_READ_ONLY_BLOCK); - } - } - } - Boolean updateMetaDataBlock = openSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_METADATA, null); - if (updateMetaDataBlock != null) { - for (String index : actualIndices) { - if (updateMetaDataBlock) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_METADATA_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_METADATA_BLOCK); - } - } - } - - Boolean updateWriteBlock = openSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_WRITE, null); - if (updateWriteBlock != null) { - for (String index : actualIndices) { - if (updateWriteBlock) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_WRITE_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_WRITE_BLOCK); - } - } - } - - Boolean updateReadBlock = openSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_READ, null); - if (updateReadBlock != null) { - for (String index : actualIndices) { - if (updateReadBlock) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_READ_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_READ_BLOCK); - } - } - } + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_ONLY_BLOCK, IndexMetaData.INDEX_READ_ONLY_SETTING, openSettings); + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_METADATA_BLOCK, IndexMetaData.INDEX_BLOCKS_METADATA_SETTING, openSettings); + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_WRITE_BLOCK, IndexMetaData.INDEX_BLOCKS_WRITE_SETTING, openSettings); + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_BLOCK, IndexMetaData.INDEX_BLOCKS_READ_SETTING, openSettings); if (!openIndices.isEmpty()) { - String[] indices = openIndices.toArray(new String[openIndices.size()]); - metaDataBuilder.updateSettings(openSettings, indices); + for (String index : openIndices) { + IndexMetaData indexMetaData = metaDataBuilder.get(index); + if (indexMetaData == null) { + throw new IndexNotFoundException(index); + } + Settings.Builder updates = Settings.builder(); + Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings()); + if (indexScopedSettings.updateDynamicSettings(openSettings, indexSettings, updates, index)) { + metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); + } + } } if (!closeIndices.isEmpty()) { - String[] indices = closeIndices.toArray(new String[closeIndices.size()]); - metaDataBuilder.updateSettings(closeSettings, indices); + for (String index : closeIndices) { + IndexMetaData indexMetaData = metaDataBuilder.get(index); + if (indexMetaData == null) { + throw new IndexNotFoundException(index); + } + Settings.Builder updates = Settings.builder(); + Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings()); + if (indexScopedSettings.updateSettings(closedSettings, indexSettings, updates, index)) { + metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); + } + } } @@ -326,12 +265,34 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // now, reroute in case things change that require it (like number of replicas) RoutingAllocation.Result routingResult = allocationService.reroute(updatedState, "settings update"); updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build(); - + for (String 
index : openIndices) { + indexScopedSettings.dryRun(updatedState.metaData().index(index).getSettings()); + } + for (String index : closeIndices) { + indexScopedSettings.dryRun(updatedState.metaData().index(index).getSettings()); + } return updatedState; } }); } + /** + * Updates the cluster block only if the setting exists in the given settings + */ + private static void maybeUpdateClusterBlock(String[] actualIndices, ClusterBlocks.Builder blocks, ClusterBlock block, Setting setting, Settings openSettings) { + if (setting.exists(openSettings)) { + final boolean updateBlock = setting.get(openSettings); + for (String index : actualIndices) { + if (updateBlock) { + blocks.addIndexBlock(index, block); + } else { + blocks.removeIndexBlock(index, block); + } + } + } + } + + public void upgradeIndexSettings(final UpgradeSettingsClusterStateUpdateRequest request, final ActionListener listener) { diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 0bb64220f1f..d07d3c334ac 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -319,7 +319,7 @@ public class DiscoveryNodes extends AbstractDiffable implements throw new IllegalArgumentException("resolved [" + node + "] into [" + resolvedNodeIds.length + "] nodes, where expected to be resolved to a single node"); } if (resolvedNodeIds.length == 0) { - throw new IllegalArgumentException("failed to resolve [" + node + " ], no matching nodes"); + throw new IllegalArgumentException("failed to resolve [" + node + "], no matching nodes"); } return nodes.get(resolvedNodeIds[0]); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 0fb7513f73f..4bf196d07d0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -114,6 +114,16 @@ public class RoutingTable implements Iterable, Diffable { public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime"); - - public static final String INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = "index.unassigned.node_left.delayed_timeout"; private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMinutes(1); + public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, true, Setting.Scope.INDEX); + /** * Reason why the shard is in unassigned state. *

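The hunk below consumes the typed setting declared above: the two-argument get prefers an index-level value and falls back to the node-level settings before applying the one-minute default, replacing the removed nested getAsTime chain. A minimal, self-contained sketch of that lookup contract (the setting declaration mirrors this diff; the class name and sample values are illustrative only):

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.unit.TimeValue;

    public class DelayedTimeoutLookupSketch {
        // Same shape as the declaration in UnassignedInfo: dynamic, index-scoped, 1m default.
        static final Setting<TimeValue> DELAYED_TIMEOUT = Setting.timeSetting(
                "index.unassigned.node_left.delayed_timeout",
                TimeValue.timeValueMinutes(1), true, Setting.Scope.INDEX);

        public static void main(String[] args) {
            // Node-level override present, no index-level value set.
            Settings nodeSettings = Settings.settingsBuilder()
                    .put("index.unassigned.node_left.delayed_timeout", "2m").build();
            Settings indexSettings = Settings.settingsBuilder().build();
            // Index settings win when present; otherwise node settings, then the default.
            TimeValue resolved = DELAYED_TIMEOUT.get(indexSettings, nodeSettings);
            System.out.println(resolved); // 2m
        }
    }
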
@@ -215,7 +216,7 @@ public class UnassignedInfo implements ToXContent, Writeable { if (reason != Reason.NODE_LEFT) { return 0; } - TimeValue delayTimeout = indexSettings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, settings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, DEFAULT_DELAYED_NODE_LEFT_TIMEOUT)); + TimeValue delayTimeout = INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(indexSettings, settings); return Math.max(0l, delayTimeout.nanos()); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java index a9ce43c5f76..65a7bd6971a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java @@ -32,7 +32,7 @@ import org.elasticsearch.gateway.GatewayAllocator; /** * The {@link ShardsAllocator} class offers methods for allocating shard within a cluster. * These methods include moving shards and re-balancing the cluster. It also allows management - * of shards by their state. + * of shards by their state. */ public class ShardsAllocators extends AbstractComponent implements ShardsAllocator { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java new file mode 100644 index 00000000000..31fc51a8979 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java @@ -0,0 +1,240 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.StreamableReader; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; +import java.util.function.Consumer; + +/** + * Abstract base class for allocating an unassigned shard to a node + */ +public abstract class AbstractAllocateAllocationCommand implements AllocationCommand, ToXContent { + + private static final String INDEX_KEY = "index"; + private static final String SHARD_KEY = "shard"; + private static final String NODE_KEY = "node"; + + protected static ObjectParser createAllocateParser(String command) { + ObjectParser parser = new ObjectParser<>(command); + parser.declareString(Builder::setIndex, new ParseField(INDEX_KEY)); + parser.declareInt(Builder::setShard, new ParseField(SHARD_KEY)); + parser.declareString(Builder::setNode, new ParseField(NODE_KEY)); + return parser; + } + + protected static abstract class Builder implements StreamableReader { + protected String index; + protected int shard = -1; + protected String node; + + public void setIndex(String index) { + this.index = index; + } + + public void setShard(int shard) { + this.shard = shard; + } + + public void setNode(String node) { + this.node = node; + } + + @Override + public Builder readFrom(StreamInput in) throws IOException { + index = in.readString(); + shard = in.readVInt(); + node = in.readString(); + return this; + } + + public abstract Builder parse(XContentParser parser) throws IOException; + + public abstract T build(); + + protected void validate() { + if (index == null) { + throw new IllegalArgumentException("Argument [index] must be defined"); + } + if (shard < 0) { + throw new IllegalArgumentException("Argument [shard] must be defined and non-negative"); + } + if (node == null) { + throw new IllegalArgumentException("Argument [node] must be defined"); + } + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field(INDEX_KEY, shardId().index().name()); + builder.field(SHARD_KEY, shardId().id()); + builder.field(NODE_KEY, node()); + return builder; + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeString(shardId.getIndex()); + out.writeVInt(shardId.getId()); + out.writeString(node); + } + + public static abstract class Factory implements AllocationCommand.Factory { + + protected abstract Builder newBuilder(); + + @Override + public T readFrom(StreamInput in) throws IOException { + return newBuilder().readFrom(in).build(); + } + + @Override + public void writeTo(T command, StreamOutput out) throws IOException {
+ command.writeTo(out); + } + + @Override + public T fromXContent(XContentParser parser) throws IOException { + return newBuilder().parse(parser).build(); + } + + @Override + public void toXContent(T command, XContentBuilder builder, ToXContent.Params params, String objectName) throws IOException { + if (objectName == null) { + builder.startObject(); + } else { + builder.startObject(objectName); + } + command.toXContent(builder, params); + builder.endObject(); + } + } + + protected final ShardId shardId; + protected final String node; + + protected AbstractAllocateAllocationCommand(ShardId shardId, String node) { + this.shardId = shardId; + this.node = node; + } + + /** + * Get the shard id + * + * @return id of the shard + */ + public ShardId shardId() { + return this.shardId; + } + + /** + * Get the id of the node + * + * @return id of the node + */ + public String node() { + return this.node; + } + + /** + * Handle case where a disco node cannot be found in the routing table. Usually means that it's not a data node. + */ + protected RerouteExplanation explainOrThrowMissingRoutingNode(RoutingAllocation allocation, boolean explain, DiscoveryNode discoNode) { + if (!discoNode.dataNode()) { + return explainOrThrowRejectedCommand(explain, allocation, "allocation can only be done on data nodes, not [" + node + "]"); + } else { + return explainOrThrowRejectedCommand(explain, allocation, "could not find [" + node + "] among the routing nodes"); + } + } + + /** + * Utility method for rejecting the current allocation command based on provided reason + */ + protected RerouteExplanation explainOrThrowRejectedCommand(boolean explain, RoutingAllocation allocation, String reason) { + if (explain) { + return new RerouteExplanation(this, allocation.decision(Decision.NO, name() + " (allocation command)", reason)); + } + throw new IllegalArgumentException("[" + name() + "] " + reason); + } + + /** + * Utility method for rejecting the current allocation command based on provided exception + */ + protected RerouteExplanation explainOrThrowRejectedCommand(boolean explain, RoutingAllocation allocation, RuntimeException rte) { + if (explain) { + return new RerouteExplanation(this, allocation.decision(Decision.NO, name() + " (allocation command)", rte.getMessage())); + } + throw rte; + } + + /** + * Initializes an unassigned shard on a node and removes it from the unassigned list + * + * @param allocation the allocation + * @param routingNodes the routing nodes + * @param routingNode the node to initialize it to + * @param shardRouting the shard routing that is to be matched in unassigned shards + */ + protected void initializeUnassignedShard(RoutingAllocation allocation, RoutingNodes routingNodes, RoutingNode routingNode, ShardRouting shardRouting) { + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, null); + } + + /** + * Initializes an unassigned shard on a node and removes it from the unassigned list + * + * @param allocation the allocation + * @param routingNodes the routing nodes + * @param routingNode the node to initialize it to + * @param shardRouting the shard routing that is to be matched in unassigned shards + * @param shardRoutingChanges changes to apply for shard routing in unassigned shards before initialization + */ + protected void initializeUnassignedShard(RoutingAllocation allocation, RoutingNodes routingNodes, RoutingNode routingNode, + ShardRouting shardRouting, @Nullable Consumer shardRoutingChanges) { + for (RoutingNodes.UnassignedShards.UnassignedIterator it = routingNodes.unassigned().iterator(); 
it.hasNext(); ) { + ShardRouting unassigned = it.next(); + if (!unassigned.equalsIgnoringMetaData(shardRouting)) { + continue; + } + if (shardRoutingChanges != null) { + shardRoutingChanges.accept(unassigned); + } + it.initialize(routingNode.nodeId(), unassigned.version(), + allocation.clusterInfo().getShardSize(unassigned, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + return; + } + assert false : "shard to initialize not found in list of unassigned shards"; + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateAllocationCommand.java deleted file mode 100644 index 5646d308dda..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateAllocationCommand.java +++ /dev/null @@ -1,240 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.routing.allocation.command; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.RoutingNodes; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; -import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.decider.Decision; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -/** - * Allocates an unassigned shard to a specific node. Note, primary allocation will "force" - * allocation which might mean one will loose data if using local gateway..., use with care - * with the allowPrimary flag. 
- */ -public class AllocateAllocationCommand implements AllocationCommand { - - public static final String NAME = "allocate"; - - public static class Factory implements AllocationCommand.Factory { - - @Override - public AllocateAllocationCommand readFrom(StreamInput in) throws IOException { - return new AllocateAllocationCommand(ShardId.readShardId(in), in.readString(), in.readBoolean()); - } - - @Override - public void writeTo(AllocateAllocationCommand command, StreamOutput out) throws IOException { - command.shardId().writeTo(out); - out.writeString(command.node()); - out.writeBoolean(command.allowPrimary()); - } - - @Override - public AllocateAllocationCommand fromXContent(XContentParser parser) throws IOException { - String index = null; - int shardId = -1; - String nodeId = null; - boolean allowPrimary = false; - - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("index".equals(currentFieldName)) { - index = parser.text(); - } else if ("shard".equals(currentFieldName)) { - shardId = parser.intValue(); - } else if ("node".equals(currentFieldName)) { - nodeId = parser.text(); - } else if ("allow_primary".equals(currentFieldName) || "allowPrimary".equals(currentFieldName)) { - allowPrimary = parser.booleanValue(); - } else { - throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName); - } - } else { - throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token); - } - } - if (index == null) { - throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME); - } - if (shardId == -1) { - throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME); - } - if (nodeId == null) { - throw new ElasticsearchParseException("[{}] command missing the node parameter", NAME); - } - return new AllocateAllocationCommand(new ShardId(index, shardId), nodeId, allowPrimary); - } - - @Override - public void toXContent(AllocateAllocationCommand command, XContentBuilder builder, ToXContent.Params params, String objectName) throws IOException { - if (objectName == null) { - builder.startObject(); - } else { - builder.startObject(objectName); - } - builder.field("index", command.shardId().index().name()); - builder.field("shard", command.shardId().id()); - builder.field("node", command.node()); - builder.field("allow_primary", command.allowPrimary()); - builder.endObject(); - } - } - - private final ShardId shardId; - private final String node; - private final boolean allowPrimary; - - /** - * Create a new {@link AllocateAllocationCommand} - * - * @param shardId {@link ShardId} of the shrad to assign - * @param node Node to assign the shard to - * @param allowPrimary should the node be allow to allocate the shard as primary - */ - public AllocateAllocationCommand(ShardId shardId, String node, boolean allowPrimary) { - this.shardId = shardId; - this.node = node; - this.allowPrimary = allowPrimary; - } - - @Override - public String name() { - return NAME; - } - - /** - * Get the shards id - * - * @return id of the shard - */ - public ShardId shardId() { - return this.shardId; - } - - /** - * Get the id of the Node - * - * @return id of the Node - */ - public String node() { - return this.node; - } - - /** - * Determine if primary allocation is allowed - 
* - * @return true if primary allocation is allowed. Otherwise false - */ - public boolean allowPrimary() { - return this.allowPrimary; - } - - @Override - public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { - final DiscoveryNode discoNode = allocation.nodes().resolveNode(node); - final RoutingNodes routingNodes = allocation.routingNodes(); - - ShardRouting shardRouting = null; - for (ShardRouting routing : routingNodes.unassigned()) { - if (routing.shardId().equals(shardId)) { - // prefer primaries first to allocate - if (shardRouting == null || routing.primary()) { - shardRouting = routing; - } - } - } - - if (shardRouting == null) { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "failed to find " + shardId + " on the list of unassigned shards")); - } - throw new IllegalArgumentException("[allocate] failed to find " + shardId + " on the list of unassigned shards"); - } - - if (shardRouting.primary() && !allowPrimary) { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "trying to allocate a primary shard " + shardId + ", which is disabled")); - } - throw new IllegalArgumentException("[allocate] trying to allocate a primary shard " + shardId + ", which is disabled"); - } - - RoutingNode routingNode = routingNodes.node(discoNode.id()); - if (routingNode == null) { - if (!discoNode.dataNode()) { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "Allocation can only be done on data nodes, not [" + node + "]")); - } - throw new IllegalArgumentException("Allocation can only be done on data nodes, not [" + node + "]"); - } else { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "Could not find [" + node + "] among the routing nodes")); - } - throw new IllegalStateException("Could not find [" + node + "] among the routing nodes"); - } - } - - Decision decision = allocation.deciders().canAllocate(shardRouting, routingNode, allocation); - if (decision.type() == Decision.Type.NO) { - if (explain) { - return new RerouteExplanation(this, decision); - } - throw new IllegalArgumentException("[allocate] allocation of " + shardId + " on node " + discoNode + " is not allowed, reason: " + decision); - } - // go over and remove it from the unassigned - for (RoutingNodes.UnassignedShards.UnassignedIterator it = routingNodes.unassigned().iterator(); it.hasNext(); ) { - ShardRouting unassigned = it.next(); - if (unassigned != shardRouting) { - continue; - } - // if we force allocation of a primary, we need to move the unassigned info back to treat it as if - // it was index creation - if (unassigned.primary() && unassigned.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED) { - unassigned.updateUnassignedInfo(new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, - "force allocation from previous reason " + unassigned.unassignedInfo().getReason() + ", " + unassigned.unassignedInfo().getMessage(), - unassigned.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis())); - } - it.initialize(routingNode.nodeId(), unassigned.version(), allocation.clusterInfo().getShardSize(unassigned, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); - break; - } - return new RerouteExplanation(this, decision); - } -} diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java new file mode 100644 index 00000000000..f607755bca1 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; + +import java.io.IOException; + +/** + * Allocates an unassigned empty primary shard to a specific node. Use with extreme care as this will result in data loss. + * Allocation deciders are ignored. 
+ */ +public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocationCommand { + public static final String NAME = "allocate_empty_primary"; + + private static final ObjectParser EMPTY_PRIMARY_PARSER = BasePrimaryAllocationCommand.createAllocatePrimaryParser(NAME); + + /** + * Creates a new {@link AllocateEmptyPrimaryAllocationCommand} + * + * @param shardId {@link ShardId} of the shard to assign + * @param node node id of the node to assign the shard to + * @param acceptDataLoss whether the user agrees to data loss + */ + public AllocateEmptyPrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { + super(shardId, node, acceptDataLoss); + } + + @Override + public String name() { + return NAME; + } + + public static class Builder extends BasePrimaryAllocationCommand.Builder { + + @Override + public Builder parse(XContentParser parser) throws IOException { + return EMPTY_PRIMARY_PARSER.parse(parser, this); + } + + @Override + public AllocateEmptyPrimaryAllocationCommand build() { + validate(); + return new AllocateEmptyPrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss); + } + } + + public static class Factory extends AbstractAllocateAllocationCommand.Factory { + + @Override + protected Builder newBuilder() { + return new Builder(); + } + } + + @Override + public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { + final DiscoveryNode discoNode; + try { + discoNode = allocation.nodes().resolveNode(node); + } catch (IllegalArgumentException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + final RoutingNodes routingNodes = allocation.routingNodes(); + RoutingNode routingNode = routingNodes.node(discoNode.id()); + if (routingNode == null) { + return explainOrThrowMissingRoutingNode(allocation, explain, discoNode); + } + + final ShardRouting shardRouting; + try { + shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + } catch (IndexNotFoundException | ShardNotFoundException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + if (shardRouting.unassigned() == false) { + return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned"); + } + + if (shardRouting.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED && acceptDataLoss == false) { + return explainOrThrowRejectedCommand(explain, allocation, + "allocating an empty primary for " + shardId + " can result in data loss. 
Please confirm by setting the accept_data_loss parameter to true"); + } + + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, + shr -> { + if (shr.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED) { + // we need to move the unassigned info back to treat it as if it was index creation + shr.updateUnassignedInfo(new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, + "force empty allocation from previous reason " + shardRouting.unassignedInfo().getReason() + ", " + shardRouting.unassignedInfo().getMessage(), + shardRouting.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis())); + } + }); + return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java new file mode 100644 index 00000000000..f9d443a6618 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; + +import java.io.IOException; +import java.util.List; + +/** + * Allocates an unassigned replica shard to a specific node. Checks if allocation deciders allow allocation. 
+ */ +public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocationCommand { + public static final String NAME = "allocate_replica"; + + private static final ObjectParser REPLICA_PARSER = createAllocateParser(NAME); + + /** + * Creates a new {@link AllocateReplicaAllocationCommand} + * + * @param shardId {@link ShardId} of the shard to assign + * @param node node id of the node to assign the shard to + */ + public AllocateReplicaAllocationCommand(ShardId shardId, String node) { + super(shardId, node); + } + + @Override + public String name() { + return NAME; + } + + protected static class Builder extends AbstractAllocateAllocationCommand.Builder { + + @Override + public Builder parse(XContentParser parser) throws IOException { + return REPLICA_PARSER.parse(parser, this); + } + + @Override + public AllocateReplicaAllocationCommand build() { + validate(); + return new AllocateReplicaAllocationCommand(new ShardId(index, shard), node); + } + } + + public static class Factory extends AbstractAllocateAllocationCommand.Factory { + @Override + protected Builder newBuilder() { + return new Builder(); + } + } + + @Override + public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { + final DiscoveryNode discoNode; + try { + discoNode = allocation.nodes().resolveNode(node); + } catch (IllegalArgumentException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + final RoutingNodes routingNodes = allocation.routingNodes(); + RoutingNode routingNode = routingNodes.node(discoNode.id()); + if (routingNode == null) { + return explainOrThrowMissingRoutingNode(allocation, explain, discoNode); + } + + final ShardRouting primaryShardRouting; + try { + primaryShardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + } catch (IndexNotFoundException | ShardNotFoundException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + if (primaryShardRouting.unassigned()) { + return explainOrThrowRejectedCommand(explain, allocation, + "trying to allocate a replica shard " + shardId + ", while corresponding primary shard is still unassigned"); + } + + List replicaShardRoutings = allocation.routingTable().shardRoutingTable(shardId).replicaShardsWithState(ShardRoutingState.UNASSIGNED); + ShardRouting shardRouting; + if (replicaShardRoutings.isEmpty()) { + return explainOrThrowRejectedCommand(explain, allocation, + "all copies of " + shardId + " are already assigned. 
Use the move allocation command instead"); + } else { + shardRouting = replicaShardRoutings.get(0); + } + + Decision decision = allocation.deciders().canAllocate(shardRouting, routingNode, allocation); + if (decision.type() == Decision.Type.NO) { + // don't use explainOrThrowRejectedCommand to keep the original "NO" decision + if (explain) { + return new RerouteExplanation(this, decision); + } + throw new IllegalArgumentException("[" + name() + "] allocation of " + shardId + " on node " + discoNode + " is not allowed, reason: " + decision); + } + + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); + return new RerouteExplanation(this, decision); + } + + +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java new file mode 100644 index 00000000000..22cedfc6aa3 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java @@ -0,0 +1,124 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; + +import java.io.IOException; + +/** + * Allocates an unassigned stale primary shard to a specific node. Use with extreme care as this will result in data loss. + * Allocation deciders are ignored. 
+ */ +public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocationCommand { + public static final String NAME = "allocate_stale_primary"; + + private static final ObjectParser STALE_PRIMARY_PARSER = BasePrimaryAllocationCommand.createAllocatePrimaryParser(NAME); + + /** + * Creates a new {@link AllocateStalePrimaryAllocationCommand} + * + * @param shardId {@link ShardId} of the shard to assign + * @param node node id of the node to assign the shard to + * @param acceptDataLoss whether the user agrees to data loss + */ + public AllocateStalePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { + super(shardId, node, acceptDataLoss); + } + + @Override + public String name() { + return NAME; + } + + public static class Builder extends BasePrimaryAllocationCommand.Builder { + + @Override + public Builder parse(XContentParser parser) throws IOException { + return STALE_PRIMARY_PARSER.parse(parser, this); + } + + @Override + public AllocateStalePrimaryAllocationCommand build() { + validate(); + return new AllocateStalePrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss); + } + } + + public static class Factory extends AbstractAllocateAllocationCommand.Factory { + + @Override + protected Builder newBuilder() { + return new Builder(); + } + } + + @Override + public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { + final DiscoveryNode discoNode; + try { + discoNode = allocation.nodes().resolveNode(node); + } catch (IllegalArgumentException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + final RoutingNodes routingNodes = allocation.routingNodes(); + RoutingNode routingNode = routingNodes.node(discoNode.id()); + if (routingNode == null) { + return explainOrThrowMissingRoutingNode(allocation, explain, discoNode); + } + + final ShardRouting shardRouting; + try { + shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + } catch (IndexNotFoundException | ShardNotFoundException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + if (shardRouting.unassigned() == false) { + return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned"); + } + + if (acceptDataLoss == false) { + return explainOrThrowRejectedCommand(explain, allocation, + "allocating a stale primary for " + shardId + " can result in data loss. 
Please confirm by setting the accept_data_loss parameter to true"); + } + + final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + if (shardRouting.allocatedPostIndexCreate(indexMetaData) == false) { + return explainOrThrowRejectedCommand(explain, allocation, + "trying to allocate an existing primary shard " + shardId + ", while no such shard has ever been active"); + } + + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); + return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); + } + +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java index 3e17ce68584..3674f56a949 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java @@ -67,7 +67,9 @@ public class AllocationCommands { } static { - registerFactory(AllocateAllocationCommand.NAME, new AllocateAllocationCommand.Factory()); + registerFactory(AllocateEmptyPrimaryAllocationCommand.NAME, new AllocateEmptyPrimaryAllocationCommand.Factory()); + registerFactory(AllocateStalePrimaryAllocationCommand.NAME, new AllocateStalePrimaryAllocationCommand.Factory()); + registerFactory(AllocateReplicaAllocationCommand.NAME, new AllocateReplicaAllocationCommand.Factory()); registerFactory(CancelAllocationCommand.NAME, new CancelAllocationCommand.Factory()); registerFactory(MoveAllocationCommand.NAME, new MoveAllocationCommand.Factory()); } @@ -76,7 +78,7 @@ public class AllocationCommands { /** * Creates a new set of {@link AllocationCommands} - * + * * @param commands {@link AllocationCommand}s that are wrapped by this instance */ public AllocationCommands(AllocationCommand... commands) { @@ -122,7 +124,7 @@ public class AllocationCommands { * Reads a {@link AllocationCommands} from a {@link StreamInput} * @param in {@link StreamInput} to read from * @return {@link AllocationCommands} read - * + * * @throws IOException if something happens during read */ public static AllocationCommands readFrom(StreamInput in) throws IOException { @@ -137,7 +139,7 @@ public class AllocationCommands { /** * Writes {@link AllocationCommands} to a {@link StreamOutput} - * + * * @param commands Commands to write * @param out {@link StreamOutput} to write the commands to * @throws IOException if something happens during write @@ -149,7 +151,7 @@ public class AllocationCommands { lookupFactorySafe(command.name()).writeTo(command, out); } } - + /** * Reads {@link AllocationCommands} from a {@link XContentParser} *

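The static registration above replaces the single catch-all allocate command with three intent-specific ones. A hedged sketch of building the equivalent reroute commands programmatically, using only constructors introduced in this change (the class name, index, and node ids below are placeholders):

    import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand;
    import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand;
    import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand;
    import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
    import org.elasticsearch.index.shard.ShardId;

    public class RerouteCommandsSketch {
        public static AllocationCommands example() {
            ShardId shard = new ShardId("my_index", 0); // placeholder index/shard
            return new AllocationCommands(
                    // replica allocation still runs through the allocation deciders
                    new AllocateReplicaAllocationCommand(shard, "node-1"),
                    // both primary variants ignore deciders and require explicit
                    // consent to potential data loss (accept_data_loss)
                    new AllocateEmptyPrimaryAllocationCommand(shard, "node-1", true),
                    new AllocateStalePrimaryAllocationCommand(shard, "node-2", true));
        }
    }
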
@@ -161,7 +163,7 @@ public class AllocationCommands {
      * 
* @param parser {@link XContentParser} to read the commands from * @return {@link AllocationCommands} read - * @throws IOException if something bad happens while reading the stream + * @throws IOException if something bad happens while reading the stream */ public static AllocationCommands fromXContent(XContentParser parser) throws IOException { AllocationCommands commands = new AllocationCommands(); @@ -203,10 +205,10 @@ public class AllocationCommands { } return commands; } - + /** * Writes {@link AllocationCommands} to a {@link XContentBuilder} - * + * * @param commands {@link AllocationCommands} to write * @param builder {@link XContentBuilder} to use * @param params Parameters to use for building diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java new file mode 100644 index 00000000000..35c1711d646 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; + +/** + * Abstract base class for allocating an unassigned primary shard to a node + */ +public abstract class BasePrimaryAllocationCommand extends AbstractAllocateAllocationCommand { + + private static final String ACCEPT_DATA_LOSS_KEY = "accept_data_loss"; + + protected static ObjectParser createAllocatePrimaryParser(String command) { + ObjectParser parser = AbstractAllocateAllocationCommand.createAllocateParser(command); + parser.declareBoolean(Builder::setAcceptDataLoss, new ParseField(ACCEPT_DATA_LOSS_KEY)); + return parser; + } + + protected final boolean acceptDataLoss; + + protected BasePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { + super(shardId, node); + this.acceptDataLoss = acceptDataLoss; + } + + /** + * The operation only executes if the user explicitly agrees to possible data loss + * + * @return whether data loss is acceptable + */ + public boolean acceptDataLoss() { + return acceptDataLoss; + } + + protected static abstract class Builder extends AbstractAllocateAllocationCommand.Builder { + protected boolean acceptDataLoss; + + public void setAcceptDataLoss(boolean acceptDataLoss) { + this.acceptDataLoss = acceptDataLoss; + } + + @Override + public Builder readFrom(StreamInput in) throws IOException { + super.readFrom(in); + acceptDataLoss = in.readBoolean(); + return this; + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + super.toXContent(builder, params); + builder.field(ACCEPT_DATA_LOSS_KEY, acceptDataLoss); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(acceptDataLoss); + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index 7554fa4c46f..c485cb3eab5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -125,7 +125,7 @@ public class CancelAllocationCommand implements AllocationCommand { /** * Creates a new {@link CancelAllocationCommand} - * + * * @param shardId id of the shard which allocation should be canceled * @param node id of the node that manages the shard which allocation should be canceled */ diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index a31d36db349..3c2e649387a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -32,7 +32,7 @@ import java.util.Locale; /** * This allocation decider allows shard allocations 
/ rebalancing via the cluster wide settings {@link #CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / - * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE} / {@link #INDEX_ROUTING_REBALANCE_ENABLE}. + * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link #INDEX_ROUTING_REBALANCE_ENABLE_SETTING}. * The per index settings overrides the cluster wide setting. * *

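The hunks below replace the raw string index settings with typed settings and switch the decider to the exists()/get() pattern: an explicit index-level value, parsed through Allocation::parse or Rebalance::parse, shadows the cluster-wide default. A minimal sketch of that resolution, assuming the typed declaration shown in this hunk (the sample value and class name are illustrative):

    import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
    import org.elasticsearch.common.settings.Settings;

    public class EnableOverrideSketch {
        public static void main(String[] args) {
            Settings indexSettings = Settings.settingsBuilder()
                    .put("index.routing.allocation.enable", "none").build();
            // exists() tells an explicit value apart from a defaulted one, so the
            // index-level setting only shadows the cluster-wide one when actually set.
            if (EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.exists(indexSettings)) {
                EnableAllocationDecider.Allocation enable =
                        EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.get(indexSettings);
                System.out.println(enable); // NONE
            }
        }
    }
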
@@ -61,10 +61,10 @@ public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.CLUSTER); - public static final String INDEX_ROUTING_ALLOCATION_ENABLE= "index.routing.allocation.enable"; + public static final Setting INDEX_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.INDEX); public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.CLUSTER); - public static final String INDEX_ROUTING_REBALANCE_ENABLE = "index.routing.rebalance.enable"; + public static final Setting INDEX_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.INDEX); private volatile Rebalance enableRebalance; private volatile Allocation enableAllocation; @@ -92,11 +92,10 @@ public class EnableAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored"); } - IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); - String enableIndexValue = indexMetaData.getSettings().get(INDEX_ROUTING_ALLOCATION_ENABLE); + final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); final Allocation enable; - if (enableIndexValue != null) { - enable = Allocation.parse(enableIndexValue); + if (INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.exists(indexMetaData.getSettings())) { + enable = INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.get(indexMetaData.getSettings()); } else { enable = this.enableAllocation; } @@ -129,10 +128,9 @@ public class EnableAllocationDecider extends AllocationDecider { } Settings indexSettings = allocation.routingNodes().metaData().index(shardRouting.index()).getSettings(); - String enableIndexValue = indexSettings.get(INDEX_ROUTING_REBALANCE_ENABLE); final Rebalance enable; - if (enableIndexValue != null) { - enable = Rebalance.parse(enableIndexValue); + if (INDEX_ROUTING_REBALANCE_ENABLE_SETTING.exists(indexSettings)) { + enable = INDEX_ROUTING_REBALANCE_ENABLE_SETTING.get(indexSettings); } else { enable = this.enableRebalance; } @@ -160,7 +158,7 @@ public class EnableAllocationDecider extends AllocationDecider { /** * Allocation values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE_SETTING} * via cluster / index settings. */ public enum Allocation { @@ -186,7 +184,7 @@ public class EnableAllocationDecider extends AllocationDecider { /** * Rebalance values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE_SETTING} * via cluster / index settings. 
*/ public enum Rebalance { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index eb9fe10e965..f8ff5f37aed 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -60,10 +60,6 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String NAME = "filter"; - public static final String INDEX_ROUTING_REQUIRE_GROUP = "index.routing.allocation.require."; - public static final String INDEX_ROUTING_INCLUDE_GROUP = "index.routing.allocation.include."; - public static final String INDEX_ROUTING_EXCLUDE_GROUP = "index.routing.allocation.exclude."; - public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.CLUSTER); public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.CLUSTER); public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.CLUSTER); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 9149d04cf60..e766b4c49aa 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -32,12 +32,12 @@ import org.elasticsearch.common.settings.Settings; /** * This {@link AllocationDecider} limits the number of shards per node on a per * index or node-wide basis. The allocator prevents a single node to hold more - * than {@value #INDEX_TOTAL_SHARDS_PER_NODE} per index and + * than index.routing.allocation.total_shards_per_node per index and * cluster.routing.allocation.total_shards_per_node globally during the allocation * process. The limits of this decider can be changed in real-time via a the * index settings API. *

- * If {@value #INDEX_TOTAL_SHARDS_PER_NODE} is reset to a negative value shards + * If index.routing.allocation.total_shards_per_node is reset to a negative value shards * per index are unlimited per node. Shards currently in the * {@link ShardRoutingState#RELOCATING relocating} state are ignored by this * {@link AllocationDecider} until the shard changed its state to either @@ -59,12 +59,13 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * Controls the maximum number of shards per index on a single Elasticsearch * node. Negative values are interpreted as unlimited. */ - public static final String INDEX_TOTAL_SHARDS_PER_NODE = "index.routing.allocation.total_shards_per_node"; + public static final Setting INDEX_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, true, Setting.Scope.INDEX); + /** * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. */ - public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, true, Setting.Scope.CLUSTER); @Inject @@ -81,7 +82,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); - int indexShardLimit = indexMd.getSettings().getAsInt(INDEX_TOTAL_SHARDS_PER_NODE, -1); + final int indexShardLimit = INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get(indexMd.getSettings(), settings); // Capture the limit here in case it changes during this method's // execution final int clusterShardLimit = this.clusterShardLimit; @@ -118,7 +119,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); - int indexShardLimit = indexMd.getSettings().getAsInt(INDEX_TOTAL_SHARDS_PER_NODE, -1); + final int indexShardLimit = INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get(indexMd.getSettings(), settings); // Capture the limit here in case it changes during this method's // execution final int clusterShardLimit = this.clusterShardLimit; diff --git a/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java b/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java deleted file mode 100644 index 7c6dec2c1b7..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/settings/DynamicSettings.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.settings; - -import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.regex.Regex; - -/** - * A container for setting names and validation methods for those settings. - */ -public class DynamicSettings { - private final ImmutableOpenMap dynamicSettings; - - public static class Builder { - private ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); - - public void addSetting(String setting, Validator validator) { - Validator old = settings.put(setting, validator); - if (old != null) { - throw new IllegalArgumentException("Cannot register setting [" + setting + "] twice"); - } - } - - public DynamicSettings build() { - return new DynamicSettings(settings.build()); - } - } - - private DynamicSettings(ImmutableOpenMap settings) { - this.dynamicSettings = settings; - } - - public boolean isDynamicOrLoggingSetting(String key) { - return hasDynamicSetting(key) || key.startsWith("logger."); - } - - public boolean hasDynamicSetting(String key) { - for (ObjectCursor dynamicSetting : dynamicSettings.keys()) { - if (Regex.simpleMatch(dynamicSetting.value, key)) { - return true; - } - } - return false; - } - - public String validateDynamicSetting(String dynamicSetting, String value, ClusterState clusterState) { - for (ObjectObjectCursor setting : dynamicSettings) { - if (Regex.simpleMatch(setting.key, dynamicSetting)) { - return setting.value.validate(dynamicSetting, value, clusterState); - } - } - return null; - } -} diff --git a/core/src/main/java/org/elasticsearch/cluster/settings/Validator.java b/core/src/main/java/org/elasticsearch/cluster/settings/Validator.java deleted file mode 100644 index cb253dceadf..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/settings/Validator.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.cluster.settings; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; - -import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; -import static org.elasticsearch.common.unit.MemorySizeValue.parseBytesSizeValueOrHeapRatio; - - -/** - * Validates a setting, returning a failure message if applicable. - */ -public interface Validator { - - String validate(String setting, String value, ClusterState clusterState); - - Validator EMPTY = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - return null; - } - }; - - Validator TIME = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - if (value == null) { - throw new NullPointerException("value must not be null"); - } - try { - // This never returns null: - TimeValue.parseTimeValue(value, null, setting); - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator TIMEOUT = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (value == null) { - throw new NullPointerException("value must not be null"); - } - TimeValue timeValue = TimeValue.parseTimeValue(value, null, setting); - assert timeValue != null; - if (timeValue.millis() < 0 && timeValue.millis() != -1) { - return "cannot parse value [" + value + "] as a timeout"; - } - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator TIME_NON_NEGATIVE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (value == null) { - throw new NullPointerException("value must not be null"); - } - TimeValue timeValue = TimeValue.parseTimeValue(value, null, setting); - assert timeValue != null; - if (timeValue.millis() < 0) { - return "cannot parse value [" + value + "] as non negative time"; - } - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator FLOAT = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - Float.parseFloat(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a float"; - } - return null; - } - }; - - Validator NON_NEGATIVE_FLOAT = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Float.parseFloat(value) < 0.0) { - return "the value of the setting " + setting + " must be a non negative float"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator DOUBLE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - Double.parseDouble(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator NON_NEGATIVE_DOUBLE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Double.parseDouble(value) < 0.0) { - return "the 
value of the setting " + setting + " must be a non negative double"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator DOUBLE_GTE_2 = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Double.parseDouble(value) < 2.0) { - return "the value of the setting " + setting + " must be >= 2.0"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator INTEGER = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - Integer.parseInt(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator POSITIVE_INTEGER = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Integer.parseInt(value) <= 0) { - return "the value of the setting " + setting + " must be a positive integer"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator NON_NEGATIVE_INTEGER = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Integer.parseInt(value) < 0) { - return "the value of the setting " + setting + " must be a non negative integer"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator INTEGER_GTE_2 = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Integer.parseInt(value) < 2) { - return "the value of the setting " + setting + " must be >= 2"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator BYTES_SIZE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - parseBytesSizeValue(value, setting); - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator POSITIVE_BYTES_SIZE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState state) { - try { - ByteSizeValue byteSizeValue = parseBytesSizeValue(value, setting); - if (byteSizeValue.getBytes() <= 0) { - return setting + " must be a positive byte size value"; - } - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator PERCENTAGE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (value == null) { - return "the value of " + setting + " can not be null"; - } - if (!value.endsWith("%")) { - return "the value [" + value + "] for " + setting + " must end with %"; - } - final double asDouble = Double.parseDouble(value.substring(0, value.length() - 1)); - if (asDouble < 0.0 || asDouble > 100.0) { - return "the value [" + value + "] for " + setting + " must be a percentage between 0% and 100%"; - } - } catch (NumberFormatException ex) { - return ex.getMessage(); - } - return null; - } - }; - - - Validator BYTES_SIZE_OR_PERCENTAGE = new Validator() { - @Override - public 
String validate(String setting, String value, ClusterState clusterState) { - String byteSize = BYTES_SIZE.validate(setting, value, clusterState); - if (byteSize != null) { - String percentage = PERCENTAGE.validate(setting, value, clusterState); - if (percentage == null) { - return null; - } - return percentage + " or be a valid bytes size value, like [16mb]"; - } - return null; - } - }; - - - Validator MEMORY_SIZE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - parseBytesSizeValueOrHeapRatio(value, setting); - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - public static final Validator BOOLEAN = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - - if (value != null && (Booleans.isExplicitFalse(value) || Booleans.isExplicitTrue(value))) { - return null; - } - return "cannot parse value [" + value + "] as a boolean"; - } - }; -} diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index c15e0d9130e..bda0106f2b6 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -36,7 +36,7 @@ public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; - static final CircleBuilder PROTOTYPE = new CircleBuilder(); + public static final CircleBuilder PROTOTYPE = new CircleBuilder(); private DistanceUnit unit = DistanceUnit.DEFAULT; private double radius; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 9ad51292977..426cbbf7800 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -33,7 +33,7 @@ public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; - static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(new Coordinate(-1.0, 1.0), new Coordinate(1.0, -1.0)); + public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(new Coordinate(-1.0, 1.0), new Coordinate(1.0, -1.0)); private Coordinate topLeft; private Coordinate bottomRight; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 6e0094f7165..420f61a6799 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -36,7 +36,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION; - static final GeometryCollectionBuilder PROTOTYPE = new GeometryCollectionBuilder(); + public static final GeometryCollectionBuilder PROTOTYPE = new GeometryCollectionBuilder(); protected final ArrayList shapes = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java 
b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 4542da436bd..8c2870e1e09 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -57,7 +57,7 @@ public class LineStringBuilder extends CoordinateCollection { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; - static final LineStringBuilder PROTOTYPE = new LineStringBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(1.0, 1.0)); + public static final LineStringBuilder PROTOTYPE = new LineStringBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(1.0, 1.0)); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 0de79ac8fe8..e69c0abe4f8 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -37,7 +37,7 @@ public class MultiLineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING; - static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); + public static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); private final ArrayList lines = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 1ed976f43ee..12b16254957 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -37,7 +37,7 @@ public class MultiPointBuilder extends CoordinateCollection { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; - final static MultiPointBuilder PROTOTYPE = new MultiPointBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).build()); + public static final MultiPointBuilder PROTOTYPE = new MultiPointBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).build()); /** * Create a new {@link MultiPointBuilder}. 
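The PROTOTYPE constants in these geo builder hunks are widened from package-private to public so that code outside the builders package can register them for stream serialization (the dedicated ShapeBuilderRegistry is deleted further down in this change). A hedged sketch of prototype-based registration, reusing the registerPrototype calls visible in that deleted registry:

    // assumed setup: a freshly constructed registry, as NetworkModule now receives one
    NamedWriteableRegistry registry = new NamedWriteableRegistry();
    registry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE);
    registry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE);
    // at read time the registry resolves the concrete prototype by the name
    // written to the stream and lets it deserialize the actual instance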
diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 8d77d395155..394892d909d 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -36,7 +36,7 @@ import java.util.Objects; public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; - static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); + public static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); private final ArrayList polygons = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 40e57566d48..1cee6525e7a 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -32,7 +32,7 @@ import java.util.Objects; public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; - static final PointBuilder PROTOTYPE = new PointBuilder(); + public static final PointBuilder PROTOTYPE = new PointBuilder(); private Coordinate coordinate; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 36589a4a4a7..ab480cfbc24 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -53,7 +53,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class PolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; - static final PolygonBuilder PROTOTYPE = new PolygonBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(0.0, 1.0) + public static final PolygonBuilder PROTOTYPE = new PolygonBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(0.0, 1.0) .coordinate(1.0, 0.0).coordinate(0.0, 0.0)); private static final Coordinate[][] EMPTY = new Coordinate[0][]; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java deleted file mode 100644 index c66e969aa3a..00000000000 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.geo.builders; - -import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; - -/** - * Register the shape builder prototypes with the {@link NamedWriteableRegistry} - */ -public class ShapeBuilderRegistry { - - @Inject - public ShapeBuilderRegistry(NamedWriteableRegistry namedWriteableRegistry) { - if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java b/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java index 9ad7bc407d0..931d290fc19 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java +++ b/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java @@ -49,8 +49,8 @@ public abstract class AbstractMatcher implements Matcher { @Override public boolean equals(Object other) { return other instanceof AndMatcher - && ((AndMatcher) other).a.equals(a) - && ((AndMatcher) other).b.equals(b); + && ((AndMatcher) other).a.equals(a) + && ((AndMatcher) other).b.equals(b); } @Override @@ -80,8 +80,8 @@ public abstract class AbstractMatcher implements Matcher { @Override public boolean equals(Object other) { return other instanceof OrMatcher - && ((OrMatcher) other).a.equals(a) - && ((OrMatcher) other).b.equals(b); + && ((OrMatcher) other).a.equals(a) + && ((OrMatcher) other).b.equals(b); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java index b1271e7338d..92aa02ba002 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java @@ -20,9 +20,11 @@ package org.elasticsearch.common.lucene; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReader.CoreClosedListener; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; +import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -72,7 +74,7 @@ public final class ShardCoreKeyMap { } final boolean added = objects.add(coreKey); assert added; - reader.addCoreClosedListener(ownerCoreCacheKey -> { + CoreClosedListener listener = ownerCoreCacheKey -> { assert coreKey == ownerCoreCacheKey; synchronized (ShardCoreKeyMap.this) { coreKeyToShard.remove(ownerCoreCacheKey); @@ -83,7 +85,20 @@ public final class ShardCoreKeyMap { 
indexToCoreKey.remove(index); + } + } - }); + }; + boolean addedListener = false; + try { + reader.addCoreClosedListener(listener); + addedListener = true; + } finally { + if (false == addedListener) { + try { + listener.onClose(coreKey); + } catch (IOException e) { + throw new RuntimeException("Blow up trying to recover from failure to add listener", e); + } + } + } } } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 73c3fc9400d..01184d1cffb 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -117,6 +117,12 @@ public class Queries { if (minimumShouldMatch == null) { return query; } + // Queries with a single word expanded with synonyms + // have their coordination factor disabled (@see org.apache.lucene.util.QueryBuilder#analyzeBoolean()). + // minimumShouldMatch should not be applicable in such a case. + if (query.isCoordDisabled()) { + return query; + } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { if (c.getOccur() == BooleanClause.Occur.SHOULD) { diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index b3abed6e230..a1e8261e63c 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -19,6 +19,9 @@ package org.elasticsearch.common.network; +import java.util.Arrays; +import java.util.List; + import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.client.transport.support.TransportProxyClient; @@ -135,9 +138,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; import org.elasticsearch.transport.netty.NettyTransport; -import java.util.Arrays; -import java.util.List; - /** * A module to handle registering and binding all network related classes. */ @@ -291,6 +291,7 @@ public class NetworkModule extends AbstractModule { private final ExtensionPoint.ClassSet restHandlers = new ExtensionPoint.ClassSet<>("rest_handler", RestHandler.class); // we must separate the cat rest handlers so RestCatAction can collect them... private final ExtensionPoint.ClassSet catHandlers = new ExtensionPoint.ClassSet<>("cat_handler", AbstractCatAction.class); + private final NamedWriteableRegistry namedWriteableRegistry; /** * Creates a network module that custom networking classes can be plugged into. @@ -298,11 +299,13 @@ * @param networkService A constructed network service object to bind. * @param settings The settings for the node * @param transportClient True if only transport classes should be allowed to be registered, false otherwise.
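The Queries hunk above skips minimum_should_match when the boolean query has its coordination factor disabled, Lucene's marker for a single analyzed term that expanded into synonyms. A hedged illustration of the case being guarded against, assuming Lucene 5.x era APIs (field and terms are made up):

    // QueryBuilder#analyzeBoolean() builds synonym expansions with coords disabled
    BooleanQuery.Builder bq = new BooleanQuery.Builder();
    bq.setDisableCoord(true);
    bq.add(new TermQuery(new Term("body", "fast")), BooleanClause.Occur.SHOULD);
    bq.add(new TermQuery(new Term("body", "quick")), BooleanClause.Occur.SHOULD);
    BooleanQuery synonyms = bq.build();
    // without the guard, a minimum_should_match of 2 would require both synonyms
    // of the single input word to match; with it the query is returned unchanged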
+ * @param namedWriteableRegistry registry for named writeables for use during streaming */ - public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient) { + public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient, NamedWriteableRegistry namedWriteableRegistry) { this.networkService = networkService; this.settings = settings; this.transportClient = transportClient; + this.namedWriteableRegistry = namedWriteableRegistry; registerTransportService(NETTY_TRANSPORT, TransportService.class); registerTransport(LOCAL_TRANSPORT, LocalTransport.class); registerTransport(NETTY_TRANSPORT, NettyTransport.class); @@ -354,7 +357,7 @@ public class NetworkModule extends AbstractModule { @Override protected void configure() { bind(NetworkService.class).toInstance(networkService); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); transportServiceTypes.bindType(binder(), settings, TRANSPORT_SERVICE_TYPE_KEY, NETTY_TRANSPORT); String defaultTransport = DiscoveryNode.localNode(settings) ? LOCAL_TRANSPORT : NETTY_TRANSPORT; diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 2c599559920..73d13b2de20 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -21,9 +21,13 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.util.set.Sets; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -36,24 +40,38 @@ import java.util.function.Consumer; */ public abstract class AbstractScopedSettings extends AbstractComponent { private Settings lastSettingsApplied = Settings.EMPTY; - private final List settingUpdaters = new ArrayList<>(); + private final List> settingUpdaters = new ArrayList<>(); private final Map> complexMatchers = new HashMap<>(); private final Map> keySettings = new HashMap<>(); private final Setting.Scope scope; protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.Scope scope) { super(settings); - for (Setting entry : settingsSet) { - if (entry.getScope() != scope) { - throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope()); - } - if (entry.hasComplexMatcher()) { - complexMatchers.put(entry.getKey(), entry); - } else { - keySettings.put(entry.getKey(), entry); - } - } + this.lastSettingsApplied = Settings.EMPTY; this.scope = scope; + for (Setting entry : settingsSet) { + addSetting(entry); + } + } + + protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, AbstractScopedSettings other) { + super(nodeSettings); + this.lastSettingsApplied = scopeSettings; + this.scope = other.scope; + complexMatchers.putAll(other.complexMatchers); + keySettings.putAll(other.keySettings); + settingUpdaters.addAll(other.settingUpdaters); + } + + protected final void addSetting(Setting setting) { + if (setting.getScope() != scope) { + throw new IllegalArgumentException("Setting must be a " + scope + " setting but was: " + 
setting.getScope()); + } + if (setting.hasComplexMatcher()) { + complexMatchers.putIfAbsent(setting.getKey(), setting); + } else { + keySettings.putIfAbsent(setting.getKey(), setting); + } } public Setting.Scope getScope() { @@ -68,7 +86,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { final Settings current = Settings.builder().put(this.settings).put(settings).build(); final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); List exceptions = new ArrayList<>(); - for (SettingUpdater settingUpdater : settingUpdaters) { + for (SettingUpdater settingUpdater : settingUpdaters) { try { if (settingUpdater.hasChanged(current, previous)) { settingUpdater.getValue(current, previous); @@ -99,7 +117,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); try { List applyRunnables = new ArrayList<>(); - for (SettingUpdater settingUpdater : settingUpdaters) { + for (SettingUpdater settingUpdater : settingUpdaters) { try { applyRunnables.add(settingUpdater.updater(current, previous)); } catch (Exception ex) { @@ -161,9 +179,38 @@ public abstract class AbstractScopedSettings extends AbstractComponent { addSettingsUpdateConsumer(setting, consumer, (s) -> {}); } + /** + * Validates that all settings in the builder are registered and valid + */ + public final void validate(Settings.Builder settingsBuilder) { + validate(settingsBuilder.build()); + } + + /** + * Validates that all given settings are registered and valid + */ + public final void validate(Settings settings) { + for (Map.Entry entry : settings.getAsMap().entrySet()) { + validate(entry.getKey(), settings); + } + } + + + /** + * Validates that the setting is valid + */ + public final void validate(String key, Settings settings) { + Setting setting = get(key); + if (setting == null) { + throw new IllegalArgumentException("unknown setting [" + key + "]"); + } + setting.get(settings); + } + /** * Transactional interface to update settings. * @see Setting + * @param <T> the type of the value of the setting */ public interface SettingUpdater { @@ -216,17 +263,16 @@ public abstract class AbstractScopedSettings extends AbstractComponent { /** * Returns the {@link Setting} for the given key or null if the setting can not be found. */ - public Setting get(String key) { + public Setting get(String key) { Setting setting = keySettings.get(key); - if (setting == null) { - for (Map.Entry> entry : complexMatchers.entrySet()) { - if (entry.getValue().match(key)) { - return entry.getValue(); - } - } - } else { + if (setting != null) { return setting; } + for (Map.Entry> entry : complexMatchers.entrySet()) { + if (entry.getValue().match(key)) { + return entry.getValue(); + } + } return null; } @@ -234,7 +280,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * Returns true if the setting for the given key is dynamically updateable. Otherwise false. */ public boolean hasDynamicSetting(String key) { - final Setting setting = get(key); + final Setting setting = get(key); return setting != null && setting.isDynamic(); } @@ -253,4 +299,93 @@ public abstract class AbstractScopedSettings extends AbstractComponent { return builder.build(); } + /** + * Returns the value for the given setting.
+ */ + public T get(Setting setting) { + if (setting.getScope() != scope) { + throw new IllegalArgumentException("settings scope doesn't match the setting scope [" + this.scope + "] != [" + setting.getScope() + "]"); + } + if (get(setting.getKey()) == null) { + throw new IllegalArgumentException("setting " + setting.getKey() + " has not been registered"); + } + return setting.get(this.lastSettingsApplied, settings); + } + + /** + * Updates a target settings builder with new, updated or deleted settings from a given settings builder. + * <p>
+ * Note: This method will only allow updates to dynamic settings. If a non-dynamic setting is updated an {@link IllegalArgumentException} is thrown instead. + * </p>
+ * @param toApply the new settings to apply + * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be removed from this builder + * @param updates a settings builder that holds all updates applied to target + * @param type a free text string to allow better exception messages + * @return true if the target has changed otherwise false + */ + public boolean updateDynamicSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) { + return updateSettings(toApply, target, updates, type, true); + } + + /** + * Updates a target settings builder with new, updated or deleted settings from a given settings builder. + * @param toApply the new settings to apply + * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be removed from this builder + * @param updates a settings builder that holds all updates applied to target + * @param type a free text string to allow better exception messages + * @return true if the target has changed otherwise false + */ + public boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) { + return updateSettings(toApply, target, updates, type, false); + } + + /** + * Updates a target settings builder with new, updated or deleted settings from a given settings builder. + * @param toApply the new settings to apply + * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be removed from this builder + * @param updates a settings builder that holds all updates applied to target + * @param type a free text string to allow better exception messages + * @param onlyDynamic if false, all settings are updated, otherwise only dynamic settings are updated. If set to true and a non-dynamic setting is updated an exception is thrown.
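A hedged usage sketch of the update methods documented above (currentPersistent and clusterSettings are hypothetical handles; the setting key comes from elsewhere in this change). Keys carrying an explicit null are removed from the target, wildcard patterns are honored by applyDeletes below, and an unknown or non-dynamic key raises an IllegalArgumentException; note that logger.* keys count as dynamic through the ClusterSettings override in the next hunk:

    Settings toApply = Settings.settingsBuilder()
            .put("cluster.routing.allocation.total_shards_per_node", 100) // dynamic, applied
            .put("logger.indices.recovery", (String) null)                // explicit null, removed from target
            .build();
    Settings.Builder target = Settings.settingsBuilder().put(currentPersistent);
    Settings.Builder updates = Settings.settingsBuilder();
    boolean changed = clusterSettings.updateDynamicSettings(toApply, target, updates, "persistent");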
+ * @return true if the target has changed otherwise false + */ + private boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type, boolean onlyDynamic) { + boolean changed = false; + final Set toRemove = new HashSet<>(); + Settings.Builder settingsBuilder = Settings.settingsBuilder(); + for (Map.Entry entry : toApply.getAsMap().entrySet()) { + if (entry.getValue() == null) { + toRemove.add(entry.getKey()); + } else if ((onlyDynamic == false && get(entry.getKey()) != null) || hasDynamicSetting(entry.getKey())) { + validate(entry.getKey(), toApply); + settingsBuilder.put(entry.getKey(), entry.getValue()); + updates.put(entry.getKey(), entry.getValue()); + changed = true; + } else { + throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + } + + } + changed |= applyDeletes(toRemove, target); + target.put(settingsBuilder.build()); + return changed; + } + + private static final boolean applyDeletes(Set deletes, Settings.Builder builder) { + boolean changed = false; + for (String entry : deletes) { + Set keysToRemove = new HashSet<>(); + Set keySet = builder.internalMap().keySet(); + for (String key : keySet) { + if (Regex.simpleMatch(entry, key)) { + keysToRemove.add(key); + } + } + for (String key : keysToRemove) { + builder.remove(key); + changed = true; + } + } + return changed; + } + } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 10c602688a4..1e764dce42c 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -38,6 +38,8 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -62,7 +64,6 @@ public final class ClusterSettings extends AbstractScopedSettings { super(settings, settingsSet, Setting.Scope.CLUSTER); } - @Override public synchronized Settings applySettings(Settings newSettings) { Settings settings = super.applySettings(newSettings); @@ -83,6 +84,11 @@ public final class ClusterSettings extends AbstractScopedSettings { return settings; } + @Override + public boolean hasDynamicSetting(String key) { + return isLoggerSetting(key) || super.hasDynamicSetting(key); + } + /** * Returns true if the settings is a logger setting. 
*/ @@ -149,5 +155,8 @@ public final class ClusterSettings extends AbstractScopedSettings { HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, Transport.TRANSPORT_PROFILES_SETTING, - Transport.TRANSPORT_TCP_COMPRESS))); + Transport.TRANSPORT_TCP_COMPRESS, + IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, + IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, + PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java new file mode 100644 index 00000000000..997ea794c99 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexingSlowLog; +import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.index.MergeSchedulerConfig; +import org.elasticsearch.index.SearchSlowLog; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.store.FsDirectoryService; +import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.indices.cache.request.IndicesRequestCache; +import org.elasticsearch.search.SearchService; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +/** + * Encapsulates all valid index level settings. 
+ * @see org.elasticsearch.common.settings.Setting.Scope#INDEX + */ +public final class IndexScopedSettings extends AbstractScopedSettings { + + public static final Predicate INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetaData.INDEX_SETTING_PREFIX); + + public static Set> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING, + IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, + IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, + MergeSchedulerConfig.AUTO_THROTTLE_SETTING, + MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, + MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, + IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING, + IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING, + IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING, + IndexMetaData.INDEX_AUTO_EXPAND_REPLICAS_SETTING, + IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING, + IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING, + IndexMetaData.INDEX_SHADOW_REPLICAS_SETTING, + IndexMetaData.INDEX_SHARED_FILESYSTEM_SETTING, + IndexMetaData.INDEX_READ_ONLY_SETTING, + IndexMetaData.INDEX_BLOCKS_READ_SETTING, + IndexMetaData.INDEX_BLOCKS_WRITE_SETTING, + IndexMetaData.INDEX_BLOCKS_METADATA_SETTING, + IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING, + IndexMetaData.INDEX_PRIORITY_SETTING, + IndexMetaData.INDEX_DATA_PATH_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING, + MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, + IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING, + IndexSettings.INDEX_WARMER_ENABLED_SETTING, + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING, + IndexSettings.MAX_RESULT_WINDOW_SETTING, + IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING, + IndexSettings.DEFAULT_FIELD_SETTING, + IndexSettings.QUERY_STRING_LENIENT_SETTING, + IndexSettings.ALLOW_UNMAPPED, + IndexSettings.INDEX_CHECK_ON_STARTUP, + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING, + IndexSettings.INDEX_GC_DELETES_SETTING, + 
IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING, + UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, + EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, + IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, + IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY, + FieldMapper.IGNORE_MALFORMED_SETTING, + FieldMapper.COERCE_SETTING, + Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, + PercolatorQueriesRegistry.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING, + MapperService.INDEX_MAPPER_DYNAMIC_SETTING, + MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, + BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, + IndexModule.INDEX_STORE_TYPE_SETTING, + IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING, + IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING, + PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS_SETTING, + FsDirectoryService.INDEX_LOCK_FACTOR_SETTING, + EngineConfig.INDEX_CODEC_SETTING, + SearchService.INDEX_NORMS_LOADING_SETTING, + // this sucks but we can't really validate all the analyzers/similarity in here + Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX), // this allows similarity settings to be passed + Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this allows analysis settings to be passed + + ))); + + public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + + public IndexScopedSettings(Settings settings, Set> settingsSet) { + super(settings, settingsSet, Setting.Scope.INDEX); + } + + private IndexScopedSettings(Settings settings, IndexScopedSettings other, IndexMetaData metaData) { + super(settings, metaData.getSettings(), other); + } + + public IndexScopedSettings copy(Settings settings, IndexMetaData metaData) { + return new IndexScopedSettings(settings, this, metaData); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 4e9e9f9428b..0fc5b062e69 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -36,12 +36,15 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.regex.Pattern; /** + * A setting. Encapsulates typical stuff like default value, parsing, and scope. + * Some (dynamic=true) can be modified at run time using the API. */ public class Setting extends ToXContentToBytes { private final String key; @@ -165,6 +168,16 @@ public class Setting extends ToXContentToBytes { return builder; } + /** + * Returns the value for this setting but falls back to the second provided settings object. */ + public final T get(Settings primary, Settings secondary) { + if (exists(primary)) { + return get(primary); + } + return get(secondary); + } + /** * The settings scope - settings can either be cluster settings or per index settings. */ @@ -173,11 +186,18 @@ public class Setting extends ToXContentToBytes { INDEX; } - final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger) { + /** + * Build a new updater with a noop validator.
+ */ + final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger) { return newUpdater(consumer, logger, (s) -> {}); } - AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) { + /** + * Build the updater responsible for validating new values, logging the new + * value, and eventually setting the value where it belongs. + */ + AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) { if (isDynamic()) { return new Updater(consumer, logger, validator); } else { @@ -216,7 +236,7 @@ public class Setting extends ToXContentToBytes { } - private class Updater implements AbstractScopedSettings.SettingUpdater { + private final class Updater implements AbstractScopedSettings.SettingUpdater { private final Consumer consumer; private final ESLogger logger; private final Consumer accept; @@ -256,8 +276,8 @@ public class Setting extends ToXContentToBytes { } @Override - public void apply(T value, Settings current, Settings previous) { - logger.info("update [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); + public final void apply(T value, Settings current, Settings previous) { + logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); consumer.accept(value); } } @@ -285,6 +305,10 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope); } + public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope); + } + public static int parseInt(String s, int minValue, String key) { int value = Integer.parseInt(s); if (value < minValue) { @@ -293,6 +317,14 @@ public class Setting extends ToXContentToBytes { return value; } + public static long parseLong(String s, long minValue, String key) { + long value = Long.parseLong(s); + if (value < minValue) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return value; + } + public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, scope); } @@ -343,6 +375,7 @@ public class Setting extends ToXContentToBytes { return array == null ? 
defaultValue.apply(settings) : arrayToParsableString(array); } + @Override public boolean match(String toTest) { return pattern.matcher(toTest).matches(); } @@ -420,6 +453,7 @@ @Override public void apply(Settings value, Settings current, Settings previous) { + logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); consumer.accept(value); } @@ -460,4 +494,16 @@ }, dynamic, scope); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Setting setting = (Setting) o; + return Objects.equals(key, setting.key); + } + + @Override + public int hashCode() { + return Objects.hash(key); + } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index fadc52a556f..601ec7a4bf6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -58,6 +58,7 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -212,6 +213,19 @@ return builder.build(); } + /** + * Returns a new settings object that contains all settings of the current one filtered by the given settings key predicate. + */ + public Settings filter(Predicate predicate) { + Builder builder = new Builder(); + for (Map.Entry entry : getAsMap().entrySet()) { + if (predicate.test(entry.getKey())) { + builder.put(entry.getKey(), entry.getValue()); + } + } + return builder.build(); + } + /** * Returns the settings mapped to the given setting name.
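The new Settings#filter above is what SettingsModule in the next hunk uses to partition node-level configuration. A small hedged sketch (nodeSettings stands in for whatever Settings instance is being split):

    // partition node settings into index-scoped keys and everything else
    Settings indexLevel = nodeSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE);
    Settings clusterLevel = nodeSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate());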
*/ diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 8bc8ce1b651..8298c0c127d 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -34,8 +34,8 @@ public class SettingsModule extends AbstractModule { private final Settings settings; private final SettingsFilter settingsFilter; - private final Map> clusterDynamicSettings = new HashMap<>(); - + private final Map> clusterSettings = new HashMap<>(); + private final Map> indexSettings = new HashMap<>(); public SettingsModule(Settings settings, SettingsFilter settingsFilter) { this.settings = settings; @@ -43,26 +43,48 @@ for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { registerSetting(setting); } + for (Setting setting : IndexScopedSettings.BUILT_IN_INDEX_SETTINGS) { + registerSetting(setting); + } } @Override protected void configure() { + final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); + final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.clusterSettings.values())); + // by now we are fully configured, let's check node level settings for unregistered index settings + indexScopedSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE)); + // we can't call this method yet since not all node level settings have been registered. + // still, we can validate the ones we have registered to make sure they don't hold invalid values; this is better than nothing, + // and, progress over perfection, we fail as soon as possible.
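Since the INDEX branch of registerSetting below no longer throws UnsupportedOperationException, index-scoped settings are registered through the same entry point as cluster settings. A hedged sketch of what a plugin-style registration could look like (the setting itself is invented for illustration):

    // dynamic, index-scoped, default 10, minimum 0
    public static final Setting<Integer> MAX_WIDGETS =
            Setting.intSetting("index.my_plugin.max_widgets", 10, 0, true, Setting.Scope.INDEX);

    public void onModule(SettingsModule module) {
        module.registerSetting(MAX_WIDGETS); // lands in the indexSettings map; duplicates are rejected
    }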
+ // clusterSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate())); + for (Map.Entry entry : settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate()).getAsMap().entrySet()) { + if (clusterSettings.get(entry.getKey()) != null) { + clusterSettings.validate(entry.getKey(), settings); + } + } + bind(Settings.class).toInstance(settings); bind(SettingsFilter.class).toInstance(settingsFilter); - final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(clusterDynamicSettings.values())); + bind(ClusterSettings.class).toInstance(clusterSettings); + bind(IndexScopedSettings.class).toInstance(indexScopedSettings); } public void registerSetting(Setting setting) { switch (setting.getScope()) { case CLUSTER: - if (clusterDynamicSettings.containsKey(setting.getKey())) { + if (clusterSettings.containsKey(setting.getKey())) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } - clusterDynamicSettings.put(setting.getKey(), setting); + clusterSettings.put(setting.getKey(), setting); break; case INDEX: - throw new UnsupportedOperationException("not yet implemented"); + if (indexSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + indexSettings.put(setting.getKey(), setting); + break; } } diff --git a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index 105bda28a99..32df65850a8 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -20,12 +20,10 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.util.Locale; @@ -176,7 +174,6 @@ public class ByteSizeValue implements Streamable { public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName) throws ElasticsearchParseException { settingName = Objects.requireNonNull(settingName); - assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_BYTES_SIZE_SETTINGS.contains(settingName); if (sValue == null) { return defaultValue; } diff --git a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java index 18641b8e418..24a727691cc 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java +++ b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java @@ -67,6 +67,8 @@ public final class Fuzziness implements ToXContent, Writeable { /** * Creates a {@link Fuzziness} instance from an edit distance. The value must be one of [0, 1, 2] + * + * Note: Using this method only makes sense if the field you are applying Fuzziness to is some sort of string. 
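The javadoc note just added to Fuzziness#fromEdits separates edit-distance fuzziness, which presupposes a string-typed field, from the other forms a Fuzziness can take. A one-line hedged usage sketch (field semantics assumed):

    Fuzziness oneEdit = Fuzziness.fromEdits(1); // e.g. lets "grey" match "gray" on a text-like field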
*/ public static Fuzziness fromEdits(int edits) { return new Fuzziness(edits); } diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java index dfa0c62d7a6..b1081c2c623 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java @@ -20,12 +20,10 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.settings.Settings; import org.joda.time.Period; import org.joda.time.PeriodType; import org.joda.time.format.PeriodFormat; @@ -254,7 +252,6 @@ public class TimeValue implements Streamable { public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) { settingName = Objects.requireNonNull(settingName); - assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName) : settingName; if (sValue == null) { return defaultValue; } } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java index 374759f7889..6c3e24d1ab9 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java @@ -51,6 +51,14 @@ public abstract class PrioritizedRunnable implements Runnable, Comparable INITIAL_SHARDS_PARSER = (value) -> { + switch (value) { + case "quorum": + case "quorum-1": + case "half": + case "one": + case "full": + case "full-1": + case "all-1": + case "all": + return value; + default: + Integer.parseInt(value); // all we care about here is that the value parses as an integer
+ return value; + } + }; - private final String initialShards; + public static final Setting NODE_INITIAL_SHARDS_SETTING = new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, true, Setting.Scope.CLUSTER); + @Deprecated + public static final Setting INDEX_RECOVERY_INITIAL_SHARDS_SETTING = new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings), INITIAL_SHARDS_PARSER, true, Setting.Scope.INDEX); public PrimaryShardAllocator(Settings settings) { super(settings); - this.initialShards = settings.get("gateway.initial_shards", settings.get("gateway.local.initial_shards", "quorum")); - logger.debug("using initial_shards [{}]", initialShards); + logger.debug("using initial_shards [{}]", NODE_INITIAL_SHARDS_SETTING.get(settings)); } public boolean allocateUnassigned(RoutingAllocation allocation) { @@ -73,7 +90,7 @@ } final IndexMetaData indexMetaData = metaData.index(shard.getIndex()); - final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings, Collections.emptyList()); + final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); if (shard.allocatedPostIndexCreate(indexMetaData) == false) { // when we create a fresh index @@ -209,29 +226,25 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { // check if the counts meet the minimum set int requiredAllocation = 1; // if we restore from a repository one copy is more than enough - try { - String initialShards = indexMetaData.getSettings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards)); - if ("quorum".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 1) { - requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1; - } - } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 2) { - requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2); - } - } else if ("one".equals(initialShards)) { - requiredAllocation = 1; - } else if ("full".equals(initialShards) || "all".equals(initialShards)) { - requiredAllocation = indexMetaData.getNumberOfReplicas() + 1; - } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 1) { - requiredAllocation = indexMetaData.getNumberOfReplicas(); - } - } else { - requiredAllocation = Integer.parseInt(initialShards); + String initialShards = INDEX_RECOVERY_INITIAL_SHARDS_SETTING.get(indexMetaData.getSettings(), settings); + if ("quorum".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 1) { + requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1; } - } catch (Exception e) { - logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard); + } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 2) { + requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2); + } + } else if ("one".equals(initialShards)) { + requiredAllocation = 1; + } else if ("full".equals(initialShards) || "all".equals(initialShards)) { + requiredAllocation = indexMetaData.getNumberOfReplicas() + 1; + } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards))
{ + if (indexMetaData.getNumberOfReplicas() > 1) { + requiredAllocation = indexMetaData.getNumberOfReplicas(); + } + } else { + requiredAllocation = Integer.parseInt(initialShards); } return nodesAndVersions.allocationsFound >= requiredAllocation; @@ -336,7 +349,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { */ private boolean recoverOnAnyNode(IndexSettings indexSettings) { return indexSettings.isOnSharedFilesystem() - && indexSettings.getSettings().getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false); + && IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING.get(indexSettings.getSettings()); } protected abstract AsyncShardFetch.FetchResult fetchData(ShardRouting shard, RoutingAllocation allocation); diff --git a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java index 4f70bf41488..c5c5794a788 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java @@ -56,7 +56,7 @@ public abstract class PriorityComparator implements Comparator { } private int priority(Settings settings) { - return settings.getAsInt(IndexMetaData.SETTING_PRIORITY, 1); + return IndexMetaData.INDEX_PRIORITY_SETTING.get(settings); } private long timeCreated(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 27ee0c17dab..ca991f14e2a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -130,7 +130,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction if (metaData != null) { ShardPath shardPath = null; try { - IndexSettings indexSettings = new IndexSettings(metaData, settings, Collections.emptyList()); + IndexSettings indexSettings = new IndexSettings(metaData, settings); shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings); if (shardPath == null) { throw new IllegalStateException(shardId + " no shard path found"); diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 9a8357daa26..b09d91b1c20 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -20,6 +20,8 @@ package org.elasticsearch.index; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.analysis.AnalysisRegistry; @@ -35,7 +37,6 @@ import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreConfig; -import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.mapper.MapperRegistry; @@ -47,6 +48,7 @@ import java.util.Map; import java.util.Set; import java.util.function.BiFunction; import java.util.function.Consumer; +import 
java.util.function.Function; /** * IndexModule represents the central extension point for index level custom implementations like: @@ -57,25 +59,24 @@ import java.util.function.Consumer; * "index.similarity.my_similarity.type : "BM25" can be used. *
 * <li>{@link IndexStore} - Custom {@link IndexStore} instances can be registered via {@link #addIndexStore(String, BiFunction)}</li>
 * <li>{@link IndexEventListener} - Custom {@link IndexEventListener} instances can be registered via {@link #addIndexEventListener(IndexEventListener)}</li>
- * <li>Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}</li>
+ * <li>Settings update listener - Custom settings update listener can be registered via {@link #addSettingsUpdateConsumer(Setting, Consumer)}</li>
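 *
 * For illustration only, a hypothetical plugin-defined dynamic index setting and its update
 * consumer (the setting name and consumer body are made up):
 * <pre>
 * Setting<Integer> MY_LIMIT = Setting.intSetting("index.my_plugin.limit", 10, 0, true, Setting.Scope.INDEX);
 * indexModule.addSettingsUpdateConsumer(MY_LIMIT, newLimit -> this.limit = newLimit); // this.limit being a volatile field
 * </pre>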
 */ public final class IndexModule { - public static final String STORE_TYPE = "index.store.type"; + public static final Setting INDEX_STORE_TYPE_SETTING = new Setting<>("index.store.type", "", Function.identity(), false, Setting.Scope.INDEX); public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; public static final String INDEX_QUERY_CACHE = "index"; public static final String NONE_QUERY_CACHE = "none"; - public static final String QUERY_CACHE_TYPE = "index.queries.cache.type"; + public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), false, Setting.Scope.INDEX); // for test purposes only - public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything"; + public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = Setting.boolSetting("index.queries.cache.everything", false, false, Setting.Scope.INDEX); private final IndexSettings indexSettings; private final IndexStoreConfig indexStoreConfig; private final AnalysisRegistry analysisRegistry; // pkg private so tests can mock final SetOnce engineFactory = new SetOnce<>(); private SetOnce indexSearcherWrapper = new SetOnce<>(); - private final Set> settingsConsumers = new HashSet<>(); private final Set indexEventListeners = new HashSet<>(); private IndexEventListener listener; private final Map> similarities = new HashMap<>(); @@ -92,17 +93,13 @@ public final class IndexModule { } /** - * Adds a settings consumer for this index + * Adds a Setting and its consumer for this index. */ - public void addIndexSettingsListener(Consumer listener) { - if (listener == null) { - throw new IllegalArgumentException("listener must not be null"); + public void addSettingsUpdateConsumer(Setting setting, Consumer consumer) { + if (setting == null) { + throw new IllegalArgumentException("setting must not be null"); } - - if (settingsConsumers.contains(listener)) { - throw new IllegalStateException("listener already registered"); - } - settingsConsumers.add(listener); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(setting, consumer); } /** @@ -245,27 +242,29 @@ public final class IndexModule { public IndexService newIndexService(NodeEnvironment environment, IndexService.ShardStoreDeleter shardStoreDeleter, NodeServicesProvider servicesProvider, MapperRegistry mapperRegistry, IndexingOperationListener... listeners) throws IOException { - final IndexSettings settings = indexSettings.newWithListener(settingsConsumers); IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null ?
(shard) -> null : indexSearcherWrapper.get(); IndexEventListener eventListener = freeze(); - final String storeType = settings.getSettings().get(STORE_TYPE); + final String storeType = indexSettings.getValue(INDEX_STORE_TYPE_SETTING); final IndexStore store; - if (storeType == null || isBuiltinType(storeType)) { - store = new IndexStore(settings, indexStoreConfig); + if (Strings.isEmpty(storeType) || isBuiltinType(storeType)) { + store = new IndexStore(indexSettings, indexStoreConfig); } else { BiFunction factory = storeTypes.get(storeType); if (factory == null) { throw new IllegalArgumentException("Unknown store type [" + storeType + "]"); } - store = factory.apply(settings, indexStoreConfig); + store = factory.apply(indexSettings, indexStoreConfig); if (store == null) { throw new IllegalStateException("store must not be null"); } } - final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, store::setMaxRate); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, store::setType); + final String queryCacheType = indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING); final BiFunction queryCacheProvider = queryCaches.get(queryCacheType); - final QueryCache queryCache = queryCacheProvider.apply(settings, servicesProvider.getIndicesQueryCache()); - return new IndexService(settings, environment, new SimilarityService(settings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(), + final QueryCache queryCache = queryCacheProvider.apply(indexSettings, servicesProvider.getIndicesQueryCache()); + return new IndexService(indexSettings, environment, new SimilarityService(indexSettings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(), servicesProvider, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, listeners); } + } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 6b79846d0f4..f848c706835 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -25,6 +25,7 @@ import java.nio.file.Path; import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; @@ -61,7 +62,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.search.stats.SearchSlowLog; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexShard; @@ -140,7 +140,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.engineFactory = engineFactory; // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE this.searcherWrapper = wrapperFactory.newWrapper(this); - this.slowLog = new IndexingSlowLog(indexSettings.getSettings()); + this.slowLog = new IndexingSlowLog(indexSettings); // Add our slowLog to the incoming 
IndexingOperationListeners: this.listeners = new IndexingOperationListener[1+listenersIn.length]; @@ -153,7 +153,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.fsyncTask = null; } this.refreshTask = new AsyncRefreshTask(this); - searchSlowLog = new SearchSlowLog(indexSettings.getSettings()); + searchSlowLog = new SearchSlowLog(indexSettings); } public int numberOfShards() { @@ -566,7 +566,6 @@ public final class IndexService extends AbstractIndexComponent implements IndexC public synchronized void updateMetaData(final IndexMetaData metadata) { if (indexSettings.updateIndexMetaData(metadata)) { - final Settings settings = indexSettings.getSettings(); for (final IndexShard shard : this.shards.values()) { try { shard.onSettingsChanged(); @@ -574,22 +573,6 @@ public final class IndexService extends AbstractIndexComponent implements IndexC logger.warn("[{}] failed to notify shard about setting change", e, shard.shardId().id()); } } - try { - indexStore.onRefreshSettings(settings); - } catch (Exception e) { - logger.warn("failed to refresh index store settings", e); - } - try { - slowLog.onRefreshSettings(settings); // this will be refactored soon anyway so duplication is ok here - } catch (Exception e) { - logger.warn("failed to refresh slowlog settings", e); - } - - try { - searchSlowLog.onRefreshSettings(settings); // this will be refactored soon anyway so duplication is ok here - } catch (Exception e) { - logger.warn("failed to refresh slowlog settings", e); - } if (refreshTask.getInterval().equals(indexSettings.getRefreshInterval()) == false) { rescheduleRefreshTasks(); } @@ -687,7 +670,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC private synchronized void onTaskCompletion() { if (mustReschedule()) { - indexService.logger.trace("scheduling {} every {}", toString(), interval); + if (indexService.logger.isTraceEnabled()) { + indexService.logger.trace("scheduling {} every {}", toString(), interval); + } this.scheduledFuture = threadPool.schedule(interval, getThreadPool(), BaseAsyncTask.this); } else { indexService.logger.trace("scheduled {} disabled", toString()); @@ -715,8 +700,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC private static boolean sameException(Exception left, Exception right) { if (left.getClass() == right.getClass()) { - if ((left.getMessage() != null && left.getMessage().equals(right.getMessage())) - || left.getMessage() == right.getMessage()) { + if (Objects.equals(left.getMessage(), right.getMessage())) { StackTraceElement[] stackTraceLeft = left.getStackTrace(); StackTraceElement[] stackTraceRight = right.getStackTrace(); if (stackTraceLeft.length == stackTraceRight.length) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 04c45c24a91..715bea51694 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.unit.ByteSizeValue; @@ -32,45 +34,64 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.translog.Translog; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Locale; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Predicate; /** * This class encapsulates all index level settings and handles settings updates. * It's created per index and available to all index level classes and allows them to retrieve * the latest updated settings instance. Classes that need to listen to settings updates can register - * a settings consumer at index creation via {@link IndexModule#addIndexSettingsListener(Consumer)} that will + * a settings consumer at index creation via {@link IndexModule#addSettingsUpdateConsumer(Setting, Consumer)} that will * be called for each settings update. */ public final class IndexSettings { - public static final String DEFAULT_FIELD = "index.query.default_field"; - public static final String QUERY_STRING_LENIENT = "index.query_string.lenient"; - public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard"; - public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard"; - public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields"; - public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; - public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; - public static final String INDEX_REFRESH_INTERVAL = "index.refresh_interval"; + public static final Setting DEFAULT_FIELD_SETTING = new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), false, Setting.Scope.INDEX); + public static final Setting QUERY_STRING_LENIENT_SETTING = Setting.boolSetting("index.query_string.lenient", false, false, Setting.Scope.INDEX); + public static final Setting QUERY_STRING_ANALYZE_WILDCARD = Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, false, Setting.Scope.CLUSTER); + public static final Setting QUERY_STRING_ALLOW_LEADING_WILDCARD = Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, false, Setting.Scope.CLUSTER); + public static final Setting ALLOW_UNMAPPED = Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), false, Setting.Scope.INDEX); + public static final Setting INDEX_TRANSLOG_DURABILITY_SETTING = new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), true, Setting.Scope.INDEX); + public static final Setting INDEX_WARMER_ENABLED_SETTING = Setting.boolSetting("index.warmer.enabled", true, true, Setting.Scope.INDEX); + public static final Setting INDEX_TTL_DISABLE_PURGE_SETTING = Setting.boolSetting("index.ttl.disable_purge", false, true, Setting.Scope.INDEX); + public static final Setting INDEX_CHECK_ON_STARTUP = new Setting<>("index.shard.check_on_startup", "false", (s) -> { + switch(s) { + case "false": + case "true": + case "fix": + case 
"checksum": + return s; + default: + throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, fix, checksum] but was: " + s); + } + }, false, Setting.Scope.INDEX); + + /** + * Index setting describing the maximum value of from + size on a query. + * The Default maximum value of from + size on a query is 10,000. This was chosen as + * a conservative default as it is sure to not cause trouble. Users can + * certainly profile their cluster and decide to set it to 100,000 + * safely. 1,000,000 is probably way to high for any cluster to set + * safely. + */ + public static final Setting MAX_RESULT_WINDOW_SETTING = Setting.intSetting("index.max_result_window", 10000, 1, true, Setting.Scope.INDEX); public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS); - public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE = "index.translog.flush_threshold_size"; - public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); + public static final Setting INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); + public static final Setting INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING = Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, Setting.Scope.INDEX); + /** * Index setting to enable / disable deletes garbage collection. * This setting is realtime updateable */ - public static final String INDEX_GC_DELETES_SETTING = "index.gc_deletes"; + public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); + public static final Setting INDEX_GC_DELETES_SETTING = Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); private final String uuid; - private final List> updateListeners; private final Index index; private final Version version; private final ESLogger logger; @@ -94,9 +115,11 @@ public final class IndexSettings { private volatile ByteSizeValue flushThresholdSize; private final MergeSchedulerConfig mergeSchedulerConfig; private final MergePolicyConfig mergePolicyConfig; - + private final IndexScopedSettings scopedSettings; private long gcDeletesInMillis = DEFAULT_GC_DELETES.millis(); - + private volatile boolean warmerEnabled; + private volatile int maxResultWindow; + private volatile boolean TTLPurgeDisabled; /** * Returns the default search field for this index. @@ -139,10 +162,13 @@ public final class IndexSettings { * * @param indexMetaData the index metadata this settings object is associated with * @param nodeSettings the nodes settings this index is allocated on. 
- * @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated */ - public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection> updateListeners) { - this(indexMetaData, nodeSettings, updateListeners, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex())); + public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings) { + this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + } + + IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, IndexScopedSettings indexScopedSettings) { + this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), indexScopedSettings); } /** @@ -151,13 +177,12 @@ public final class IndexSettings { * * @param indexMetaData the index metadata this settings object is associated with * @param nodeSettings the nodes settings this index is allocated on. - * @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated * @param indexNameMatcher a matcher that can resolve an expression to the index name or index alias */ - public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection> updateListeners, final Predicate indexNameMatcher) { + public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Predicate indexNameMatcher, IndexScopedSettings indexScopedSettings) { + scopedSettings = indexScopedSettings.copy(nodeSettings, indexMetaData); this.nodeSettings = nodeSettings; this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build(); - this.updateListeners = Collections.unmodifiableList( new ArrayList<>(updateListeners)); this.index = new Index(indexMetaData.getIndex()); version = Version.indexCreated(settings); uuid = settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); @@ -167,32 +192,44 @@ public final class IndexSettings { numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null); isShadowReplicaIndex = IndexMetaData.isIndexUsingShadowReplicas(settings); - this.defaultField = settings.get(DEFAULT_FIELD, AllFieldMapper.NAME); - this.queryStringLenient = settings.getAsBoolean(QUERY_STRING_LENIENT, false); - this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false); - this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true); + this.defaultField = DEFAULT_FIELD_SETTING.get(settings); + this.queryStringLenient = QUERY_STRING_LENIENT_SETTING.get(settings); + this.queryStringAnalyzeWildcard = QUERY_STRING_ANALYZE_WILDCARD.get(nodeSettings); + this.queryStringAllowLeadingWildcard = QUERY_STRING_ALLOW_LEADING_WILDCARD.get(nodeSettings); this.parseFieldMatcher = new ParseFieldMatcher(settings); - this.defaultAllowUnmappedFields = settings.getAsBoolean(ALLOW_UNMAPPED, true); + this.defaultAllowUnmappedFields = scopedSettings.get(ALLOW_UNMAPPED); this.indexNameMatcher = indexNameMatcher; - final String value = settings.get(INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST.name()); - this.durability = getFromSettings(settings, Translog.Durability.REQUEST); - syncInterval = settings.getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); - refreshInterval = 
settings.getAsTime(INDEX_REFRESH_INTERVAL, DEFAULT_REFRESH_INTERVAL); - flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)); - mergeSchedulerConfig = new MergeSchedulerConfig(settings); - gcDeletesInMillis = settings.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING, DEFAULT_GC_DELETES).getMillis(); - this.mergePolicyConfig = new MergePolicyConfig(logger, settings); + this.durability = scopedSettings.get(INDEX_TRANSLOG_DURABILITY_SETTING); + scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_DURABILITY_SETTING, this::setTranslogDurability); + syncInterval = INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.get(settings); + refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING); + scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval); + flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING); + scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, this::setTranslogFlushThresholdSize); + mergeSchedulerConfig = new MergeSchedulerConfig(this); + scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes); + gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); + warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); + scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer); + maxResultWindow = scopedSettings.get(MAX_RESULT_WINDOW_SETTING); + scopedSettings.addSettingsUpdateConsumer(MAX_RESULT_WINDOW_SETTING, this::setMaxResultWindow); + TTLPurgeDisabled = scopedSettings.get(INDEX_TTL_DISABLE_PURGE_SETTING); + scopedSettings.addSettingsUpdateConsumer(INDEX_TTL_DISABLE_PURGE_SETTING, this::setTTLPurgeDisabled); + this.mergePolicyConfig = new MergePolicyConfig(logger, this); assert indexNameMatcher.test(indexMetaData.getIndex()); + } + private void setTranslogFlushThresholdSize(ByteSizeValue byteSizeValue) { + this.flushThresholdSize = byteSizeValue; + } - /** - * Creates a new {@link IndexSettings} instance adding the given listeners to the settings - */ - IndexSettings newWithListener(final Collection> updateListeners) { - ArrayList> newUpdateListeners = new ArrayList<>(updateListeners); - newUpdateListeners.addAll(this.updateListeners); - return new IndexSettings(indexMetaData, nodeSettings, newUpdateListeners, indexNameMatcher); + private void setGCDeletes(TimeValue timeValue) { + this.gcDeletesInMillis = timeValue.getMillis(); + } + + private void setRefreshInterval(TimeValue timeValue) { + this.refreshInterval = timeValue; } /** @@ -321,33 +358,15 @@ public final class IndexSettings { } this.indexMetaData = indexMetaData; final Settings existingSettings = this.settings; - if (existingSettings.getByPrefix(IndexMetaData.INDEX_SETTING_PREFIX).getAsMap().equals(newSettings.getByPrefix(IndexMetaData.INDEX_SETTING_PREFIX).getAsMap())) { + if (existingSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).getAsMap().equals(newSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).getAsMap())) { // nothing to update, same settings return false; } - final Settings mergedSettings = this.settings = Settings.builder().put(nodeSettings).put(newSettings).build(); - for (final Consumer consumer : updateListeners) { - try { - consumer.accept(mergedSettings); - } catch (Exception e) { - logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); - } - } - try { - 
updateSettings(mergedSettings); - } catch (Exception e) { - logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); - } + scopedSettings.applySettings(newSettings); + this.settings = Settings.builder().put(nodeSettings).put(newSettings).build(); return true; } - /** - * Returns all settings update consumers - */ - List> getUpdateListeners() { // for testing - return updateListeners; - } - /** * Returns the translog durability for this index. */ @@ -355,60 +374,19 @@ public final class IndexSettings { return durability; } - private Translog.Durability getFromSettings(Settings settings, Translog.Durability defaultValue) { - final String value = settings.get(INDEX_TRANSLOG_DURABILITY, defaultValue.name()); - try { - return Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)); - } catch (IllegalArgumentException ex) { - logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durability.values())); - return defaultValue; - } + private void setTranslogDurability(Translog.Durability durability) { + this.durability = durability; } - private void updateSettings(Settings settings) { - final Translog.Durability durability = getFromSettings(settings, this.durability); - if (durability != this.durability) { - logger.info("updating durability from [{}] to [{}]", this.durability, durability); - this.durability = durability; - } + /** + * Returns true if index warmers are enabled, otherwise false + */ + public boolean isWarmerEnabled() { + return warmerEnabled; + } - TimeValue refreshInterval = settings.getAsTime(IndexSettings.INDEX_REFRESH_INTERVAL, this.refreshInterval); - if (!refreshInterval.equals(this.refreshInterval)) { - logger.info("updating refresh_interval from [{}] to [{}]", this.refreshInterval, refreshInterval); - this.refreshInterval = refreshInterval; - } - - ByteSizeValue flushThresholdSize = settings.getAsBytesSize(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, this.flushThresholdSize); - if (!flushThresholdSize.equals(this.flushThresholdSize)) { - logger.info("updating flush_threshold_size from [{}] to [{}]", this.flushThresholdSize, flushThresholdSize); - this.flushThresholdSize = flushThresholdSize; - } - - final int maxThreadCount = settings.getAsInt(MergeSchedulerConfig.MAX_THREAD_COUNT, mergeSchedulerConfig.getMaxThreadCount()); - if (maxThreadCount != mergeSchedulerConfig.getMaxThreadCount()) { - logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.MAX_THREAD_COUNT, mergeSchedulerConfig.getMaxMergeCount(), maxThreadCount); - mergeSchedulerConfig.setMaxThreadCount(maxThreadCount); - } - - final int maxMergeCount = settings.getAsInt(MergeSchedulerConfig.MAX_MERGE_COUNT, mergeSchedulerConfig.getMaxMergeCount()); - if (maxMergeCount != mergeSchedulerConfig.getMaxMergeCount()) { - logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.MAX_MERGE_COUNT, mergeSchedulerConfig.getMaxMergeCount(), maxMergeCount); - mergeSchedulerConfig.setMaxMergeCount(maxMergeCount); - } - - final boolean autoThrottle = settings.getAsBoolean(MergeSchedulerConfig.AUTO_THROTTLE, mergeSchedulerConfig.isAutoThrottle()); - if (autoThrottle != mergeSchedulerConfig.isAutoThrottle()) { - logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.AUTO_THROTTLE, mergeSchedulerConfig.isAutoThrottle(), autoThrottle); - mergeSchedulerConfig.setAutoThrottle(autoThrottle); - } - - long gcDeletesInMillis = 
settings.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING, TimeValue.timeValueMillis(this.gcDeletesInMillis)).getMillis(); - if (gcDeletesInMillis != this.gcDeletesInMillis) { - logger.info("updating {} from [{}] to [{}]", IndexSettings.INDEX_GC_DELETES_SETTING, TimeValue.timeValueMillis(this.gcDeletesInMillis), TimeValue.timeValueMillis(gcDeletesInMillis)); - this.gcDeletesInMillis = gcDeletesInMillis; - } - - mergePolicyConfig.onRefreshSettings(settings); + private void setEnableWarmer(boolean enableWarmer) { + this.warmerEnabled = enableWarmer; } /** @@ -436,6 +414,18 @@ public final class IndexSettings { */ public MergeSchedulerConfig getMergeSchedulerConfig() { return mergeSchedulerConfig; } + /** + * Returns the max result window for search requests, describing the maximum value of from + size on a query. + */ + public int getMaxResultWindow() { + return this.maxResultWindow; + } + + private void setMaxResultWindow(int maxResultWindow) { + this.maxResultWindow = maxResultWindow; + } + + /** * Returns the GC deletes cycle in milliseconds. */ @@ -450,4 +440,22 @@ public final class IndexSettings { return mergePolicyConfig.getMergePolicy(); } + /** + * Returns true if the TTL purge is disabled for this index. Default is false + */ + public boolean isTTLPurgeDisabled() { + return TTLPurgeDisabled; + } + + private void setTTLPurgeDisabled(boolean ttlPurgeDisabled) { + this.TTLPurgeDisabled = ttlPurgeDisabled; + } + + + public T getValue(Setting setting) { + return scopedSettings.get(setting); + } + + + public IndexScopedSettings getScopedSettings() { return scopedSettings;} } diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 5cd3685b2f8..5452daa7f07 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.Engine; @@ -31,15 +31,12 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.shard.IndexingOperationListener; import java.io.IOException; -import java.util.Locale; import java.util.concurrent.TimeUnit; /** */ public final class IndexingSlowLog implements IndexingOperationListener { - private boolean reformat; - private long indexWarnThreshold; private long indexInfoThreshold; private long indexDebugThreshold; @@ -51,21 +48,33 @@ public final class IndexingSlowLog implements IndexingOperationListener { */ private int maxSourceCharsToLog; - private String level; + private SlowLogLevel level; private final ESLogger indexLogger; private final ESLogger deleteLogger; private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG = INDEX_INDEXING_SLOWLOG_PREFIX 
+".threshold.index.debug"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace"; - public static final String INDEX_INDEXING_SLOWLOG_REFORMAT = INDEX_INDEXING_SLOWLOG_PREFIX +".reformat"; - public static final String INDEX_INDEXING_SLOWLOG_LEVEL = INDEX_INDEXING_SLOWLOG_PREFIX +".level"; - public static final String INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG = INDEX_INDEXING_SLOWLOG_PREFIX + ".source"; + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING = Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_LEVEL_SETTING = new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, Setting.Scope.INDEX); + /** + * Reads how much of the source to log. The user can specify any value they + * like and numbers are interpreted the maximum number of characters to log + * and everything else is interpreted as Elasticsearch interprets booleans + * which is then converted to 0 for false and Integer.MAX_VALUE for true. + */ + public static final Setting INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING = new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX + ".source", "1000", (value) -> { + try { + return Integer.parseInt(value, 10); + } catch (NumberFormatException e) { + return Booleans.parseBoolean(value, true) ? Integer.MAX_VALUE : 0; + } + }, true, Setting.Scope.INDEX); - IndexingSlowLog(Settings indexSettings) { + IndexingSlowLog(IndexSettings indexSettings) { this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"), Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".delete")); } @@ -73,77 +82,62 @@ public final class IndexingSlowLog implements IndexingOperationListener { /** * Build with the specified loggers. Only used to testing. 
*/ - IndexingSlowLog(Settings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) { + IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) { this.indexLogger = indexLogger; this.deleteLogger = deleteLogger; - this.reformat = indexSettings.getAsBoolean(INDEX_INDEXING_SLOWLOG_REFORMAT, true); - this.indexWarnThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, TimeValue.timeValueNanos(-1)).nanos(); - this.indexInfoThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, TimeValue.timeValueNanos(-1)).nanos(); - this.indexDebugThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, TimeValue.timeValueNanos(-1)).nanos(); - this.indexTraceThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, TimeValue.timeValueNanos(-1)).nanos(); - this.level = indexSettings.get(INDEX_INDEXING_SLOWLOG_LEVEL, "TRACE").toUpperCase(Locale.ROOT); - this.maxSourceCharsToLog = readSourceToLog(indexSettings); - - indexLogger.setLevel(level); - deleteLogger.setLevel(level); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat); + this.reformat = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING, this::setWarnThreshold); + this.indexWarnThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING, this::setInfoThreshold); + this.indexInfoThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING, this::setDebugThreshold); + this.indexDebugThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING, this::setTraceThreshold); + this.indexTraceThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_LEVEL_SETTING, this::setLevel); + setLevel(indexSettings.getValue(INDEX_INDEXING_SLOWLOG_LEVEL_SETTING)); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING, this::setMaxSourceCharsToLog); + this.maxSourceCharsToLog = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING); } - synchronized void onRefreshSettings(Settings settings) { - long indexWarnThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, TimeValue.timeValueNanos(this.indexWarnThreshold)).nanos(); - if (indexWarnThreshold != this.indexWarnThreshold) { - this.indexWarnThreshold = indexWarnThreshold; - } - long indexInfoThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, TimeValue.timeValueNanos(this.indexInfoThreshold)).nanos(); - if (indexInfoThreshold != this.indexInfoThreshold) { - this.indexInfoThreshold = indexInfoThreshold; - } - long indexDebugThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, TimeValue.timeValueNanos(this.indexDebugThreshold)).nanos(); - if (indexDebugThreshold != 
this.indexDebugThreshold) { - this.indexDebugThreshold = indexDebugThreshold; - } - long indexTraceThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, TimeValue.timeValueNanos(this.indexTraceThreshold)).nanos(); - if (indexTraceThreshold != this.indexTraceThreshold) { - this.indexTraceThreshold = indexTraceThreshold; - } - - String level = settings.get(INDEX_INDEXING_SLOWLOG_LEVEL, this.level); - if (!level.equals(this.level)) { - this.indexLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.deleteLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.level = level; - } - - boolean reformat = settings.getAsBoolean(INDEX_INDEXING_SLOWLOG_REFORMAT, this.reformat); - if (reformat != this.reformat) { - this.reformat = reformat; - } - - int maxSourceCharsToLog = readSourceToLog(settings); - if (maxSourceCharsToLog != this.maxSourceCharsToLog) { - this.maxSourceCharsToLog = maxSourceCharsToLog; - } + private void setMaxSourceCharsToLog(int maxSourceCharsToLog) { + this.maxSourceCharsToLog = maxSourceCharsToLog; } + private void setLevel(SlowLogLevel level) { + this.level = level; + this.indexLogger.setLevel(level.name()); + this.deleteLogger.setLevel(level.name()); + } + + private void setWarnThreshold(TimeValue warnThreshold) { + this.indexWarnThreshold = warnThreshold.nanos(); + } + + private void setInfoThreshold(TimeValue infoThreshold) { + this.indexInfoThreshold = infoThreshold.nanos(); + } + + private void setDebugThreshold(TimeValue debugThreshold) { + this.indexDebugThreshold = debugThreshold.nanos(); + } + + private void setTraceThreshold(TimeValue traceThreshold) { + this.indexTraceThreshold = traceThreshold.nanos(); + } + + private void setReformat(boolean reformat) { + this.reformat = reformat; + } + + public void postIndex(Engine.Index index) { final long took = index.endTime() - index.startTime(); postIndexing(index.parsedDoc(), took); } - /** - * Reads how much of the source to log. The user can specify any value they - * like and numbers are interpreted the maximum number of characters to log - * and everything else is interpreted as Elasticsearch interprets booleans - * which is then converted to 0 for false and Integer.MAX_VALUE for true. - */ - private int readSourceToLog(Settings settings) { - String sourceToLog = settings.get(INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG, "1000"); - try { - return Integer.parseInt(sourceToLog, 10); - } catch (NumberFormatException e) { - return Booleans.parseBoolean(sourceToLog, true) ? 
Integer.MAX_VALUE : 0; - } - } private void postIndexing(ParsedDocument doc, long tookInNanos) { if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) { @@ -194,4 +188,33 @@ public final class IndexingSlowLog implements IndexingOperationListener { return sb.toString(); } } + + boolean isReformat() { + return reformat; + } + + long getIndexWarnThreshold() { + return indexWarnThreshold; + } + + long getIndexInfoThreshold() { + return indexInfoThreshold; + } + + long getIndexTraceThreshold() { + return indexTraceThreshold; + } + + long getIndexDebugThreshold() { + return indexDebugThreshold; + } + + int getMaxSourceCharsToLog() { + return maxSourceCharsToLog; + } + + SlowLogLevel getLevel() { + return level; + } + } diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index d3b90b131f5..362e9099ee2 100644 --- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -23,7 +23,7 @@ import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.TieredMergePolicy; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -117,7 +117,6 @@ public final class MergePolicyConfig { private final TieredMergePolicy mergePolicy = new TieredMergePolicy(); private final ESLogger logger; private final boolean mergesEnabled; - private volatile double noCFSRatio; public static final double DEFAULT_EXPUNGE_DELETES_ALLOWED = 10d; public static final ByteSizeValue DEFAULT_FLOOR_SEGMENT = new ByteSizeValue(2, ByteSizeUnit.MB); @@ -126,35 +125,42 @@ public final class MergePolicyConfig { public static final ByteSizeValue DEFAULT_MAX_MERGED_SEGMENT = new ByteSizeValue(5, ByteSizeUnit.GB); public static final double DEFAULT_SEGMENTS_PER_TIER = 10.0d; public static final double DEFAULT_RECLAIM_DELETES_WEIGHT = 2.0d; - public static final String INDEX_COMPOUND_FORMAT = "index.compound_format"; + public static final Setting INDEX_COMPOUND_FORMAT_SETTING = new Setting<>("index.compound_format", Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO), MergePolicyConfig::parseNoCFSRatio, true, Setting.Scope.INDEX); - public static final String INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED = "index.merge.policy.expunge_deletes_allowed"; - public static final String INDEX_MERGE_POLICY_FLOOR_SEGMENT = "index.merge.policy.floor_segment"; - public static final String INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE = "index.merge.policy.max_merge_at_once"; - public static final String INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT = "index.merge.policy.max_merge_at_once_explicit"; - public static final String INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT = "index.merge.policy.max_merged_segment"; - public static final String INDEX_MERGE_POLICY_SEGMENTS_PER_TIER = "index.merge.policy.segments_per_tier"; - public static final String INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT = "index.merge.policy.reclaim_deletes_weight"; - public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; + public static final Setting INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING = Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, true, Setting.Scope.INDEX); + 
public static final Setting INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE, 2, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING = Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, true, Setting.Scope.INDEX); + public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... we only set this in tests and register via a plugin - MergePolicyConfig(ESLogger logger, Settings indexSettings) { + MergePolicyConfig(ESLogger logger, IndexSettings indexSettings) { this.logger = logger; - this.noCFSRatio = parseNoCFSRatio(indexSettings.get(INDEX_COMPOUND_FORMAT, Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO))); - double forceMergeDeletesPctAllowed = indexSettings.getAsDouble("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED); // percentage - ByteSizeValue floorSegment = indexSettings.getAsBytesSize("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT); - int maxMergeAtOnce = indexSettings.getAsInt("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE); - int maxMergeAtOnceExplicit = indexSettings.getAsInt("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_COMPOUND_FORMAT_SETTING, this::setNoCFSRatio); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, this::expungeDeletesAllowed); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, this::floorSegmentSetting); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, this::maxMergesAtOnce); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, this::maxMergesAtOnceExplicit); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, this::maxMergedSegment); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, this::segmentsPerTier); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, this::reclaimDeletesWeight); + double forceMergeDeletesPctAllowed = indexSettings.getValue(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING); // percentage + ByteSizeValue floorSegment = indexSettings.getValue(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING); + int maxMergeAtOnce = 
indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING); + int maxMergeAtOnceExplicit = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING); // TODO is this really a good default number for max_merge_segment, what happens for large indices, won't they end up with many segments? - ByteSizeValue maxMergedSegment = indexSettings.getAsBytesSize("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT); - double segmentsPerTier = indexSettings.getAsDouble("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER); - double reclaimDeletesWeight = indexSettings.getAsDouble("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT); - this.mergesEnabled = indexSettings.getAsBoolean(INDEX_MERGE_ENABLED, true); + ByteSizeValue maxMergedSegment = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING); + double segmentsPerTier = indexSettings.getValue(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING); + double reclaimDeletesWeight = indexSettings.getValue(INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING); + this.mergesEnabled = indexSettings.getSettings().getAsBoolean(INDEX_MERGE_ENABLED, true); if (mergesEnabled == false) { logger.warn("[{}] is set to false, this should only be used in tests and can cause serious problems in production environments", INDEX_MERGE_ENABLED); } maxMergeAtOnce = adjustMaxMergeAtOnceIfNeeded(maxMergeAtOnce, segmentsPerTier); - mergePolicy.setNoCFSRatio(noCFSRatio); + mergePolicy.setNoCFSRatio(indexSettings.getValue(INDEX_COMPOUND_FORMAT_SETTING)); mergePolicy.setForceMergeDeletesPctAllowed(forceMergeDeletesPctAllowed); mergePolicy.setFloorSegmentMB(floorSegment.mbFrac()); mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce); @@ -166,6 +172,38 @@ public final class MergePolicyConfig { forceMergeDeletesPctAllowed, floorSegment, maxMergeAtOnce, maxMergeAtOnceExplicit, maxMergedSegment, segmentsPerTier, reclaimDeletesWeight); } + private void reclaimDeletesWeight(Double reclaimDeletesWeight) { + mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); + } + + private void segmentsPerTier(Double segmentsPerTier) { + mergePolicy.setSegmentsPerTier(segmentsPerTier); + } + + private void maxMergedSegment(ByteSizeValue maxMergedSegment) { + mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac()); + } + + private void maxMergesAtOnceExplicit(Integer maxMergeAtOnceExplicit) { + mergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit); + } + + private void maxMergesAtOnce(Integer maxMergeAtOnce) { + mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce); + } + + private void floorSegmentSetting(ByteSizeValue floorSegementSetting) { + mergePolicy.setFloorSegmentMB(floorSegementSetting.mbFrac()); + } + + private void expungeDeletesAllowed(Double value) { + mergePolicy.setForceMergeDeletesPctAllowed(value); + } + + private void setNoCFSRatio(Double noCFSRatio) { + mergePolicy.setNoCFSRatio(noCFSRatio); + } + private int adjustMaxMergeAtOnceIfNeeded(int maxMergeAtOnce, double segmentsPerTier) { // fixing maxMergeAtOnce, see TieredMergePolicy#setMaxMergeAtOnce if (!(segmentsPerTier >= maxMergeAtOnce)) { @@ -184,65 +222,6 @@ public final class MergePolicyConfig { return mergesEnabled ? 
mergePolicy : NoMergePolicy.INSTANCE; } - void onRefreshSettings(Settings settings) { - final double oldExpungeDeletesPctAllowed = mergePolicy.getForceMergeDeletesPctAllowed(); - final double expungeDeletesPctAllowed = settings.getAsDouble(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, oldExpungeDeletesPctAllowed); - if (expungeDeletesPctAllowed != oldExpungeDeletesPctAllowed) { - logger.info("updating [expunge_deletes_allowed] from [{}] to [{}]", oldExpungeDeletesPctAllowed, expungeDeletesPctAllowed); - mergePolicy.setForceMergeDeletesPctAllowed(expungeDeletesPctAllowed); - } - - final double oldFloorSegmentMB = mergePolicy.getFloorSegmentMB(); - final ByteSizeValue floorSegment = settings.getAsBytesSize(INDEX_MERGE_POLICY_FLOOR_SEGMENT, null); - if (floorSegment != null && floorSegment.mbFrac() != oldFloorSegmentMB) { - logger.info("updating [floor_segment] from [{}mb] to [{}]", oldFloorSegmentMB, floorSegment); - mergePolicy.setFloorSegmentMB(floorSegment.mbFrac()); - } - - final double oldSegmentsPerTier = mergePolicy.getSegmentsPerTier(); - final double segmentsPerTier = settings.getAsDouble(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, oldSegmentsPerTier); - if (segmentsPerTier != oldSegmentsPerTier) { - logger.info("updating [segments_per_tier] from [{}] to [{}]", oldSegmentsPerTier, segmentsPerTier); - mergePolicy.setSegmentsPerTier(segmentsPerTier); - } - - final int oldMaxMergeAtOnce = mergePolicy.getMaxMergeAtOnce(); - int maxMergeAtOnce = settings.getAsInt(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, oldMaxMergeAtOnce); - if (maxMergeAtOnce != oldMaxMergeAtOnce) { - logger.info("updating [max_merge_at_once] from [{}] to [{}]", oldMaxMergeAtOnce, maxMergeAtOnce); - maxMergeAtOnce = adjustMaxMergeAtOnceIfNeeded(maxMergeAtOnce, segmentsPerTier); - mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce); - } - - final int oldMaxMergeAtOnceExplicit = mergePolicy.getMaxMergeAtOnceExplicit(); - final int maxMergeAtOnceExplicit = settings.getAsInt(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, oldMaxMergeAtOnceExplicit); - if (maxMergeAtOnceExplicit != oldMaxMergeAtOnceExplicit) { - logger.info("updating [max_merge_at_once_explicit] from [{}] to [{}]", oldMaxMergeAtOnceExplicit, maxMergeAtOnceExplicit); - mergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit); - } - - final double oldMaxMergedSegmentMB = mergePolicy.getMaxMergedSegmentMB(); - final ByteSizeValue maxMergedSegment = settings.getAsBytesSize(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, null); - if (maxMergedSegment != null && maxMergedSegment.mbFrac() != oldMaxMergedSegmentMB) { - logger.info("updating [max_merged_segment] from [{}mb] to [{}]", oldMaxMergedSegmentMB, maxMergedSegment); - mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac()); - } - - final double oldReclaimDeletesWeight = mergePolicy.getReclaimDeletesWeight(); - final double reclaimDeletesWeight = settings.getAsDouble(INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, oldReclaimDeletesWeight); - if (reclaimDeletesWeight != oldReclaimDeletesWeight) { - logger.info("updating [reclaim_deletes_weight] from [{}] to [{}]", oldReclaimDeletesWeight, reclaimDeletesWeight); - mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); - } - - double noCFSRatio = parseNoCFSRatio(settings.get(INDEX_COMPOUND_FORMAT, Double.toString(MergePolicyConfig.this.noCFSRatio))); - if (noCFSRatio != MergePolicyConfig.this.noCFSRatio) { - logger.info("updating index.compound_format from [{}] to [{}]", formatNoCFSRatio(MergePolicyConfig.this.noCFSRatio), formatNoCFSRatio(noCFSRatio)); - 
mergePolicy.setNoCFSRatio(noCFSRatio); - MergePolicyConfig.this.noCFSRatio = noCFSRatio; - } - } - private static double parseNoCFSRatio(String noCFSRatio) { noCFSRatio = noCFSRatio.trim(); if (noCFSRatio.equalsIgnoreCase("true")) { @@ -261,14 +240,4 @@ public final class MergePolicyConfig { } } } - - private static String formatNoCFSRatio(double ratio) { - if (ratio == 1.0) { - return Boolean.TRUE.toString(); - } else if (ratio == 0.0) { - return Boolean.FALSE.toString(); - } else { - return Double.toString(ratio); - } - } } diff --git a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java index ad6e2ec5d17..59576f1869b 100644 --- a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java @@ -20,6 +20,7 @@ package org.elasticsearch.index; import org.apache.lucene.index.ConcurrentMergeScheduler; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.IndexSettings; @@ -52,18 +53,21 @@ import org.elasticsearch.index.IndexSettings; */ public final class MergeSchedulerConfig { - public static final String MAX_THREAD_COUNT = "index.merge.scheduler.max_thread_count"; - public static final String MAX_MERGE_COUNT = "index.merge.scheduler.max_merge_count"; - public static final String AUTO_THROTTLE = "index.merge.scheduler.auto_throttle"; + public static final Setting MAX_THREAD_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_thread_count", (s) -> Integer.toString(Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(s) / 2))), (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), true, Setting.Scope.INDEX); + public static final Setting MAX_MERGE_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_merge_count", (s) -> Integer.toString(MAX_THREAD_COUNT_SETTING.get(s) + 5), (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), true, Setting.Scope.INDEX); + public static final Setting AUTO_THROTTLE_SETTING = Setting.boolSetting("index.merge.scheduler.auto_throttle", true, true, Setting.Scope.INDEX); private volatile boolean autoThrottle; private volatile int maxThreadCount; private volatile int maxMergeCount; - MergeSchedulerConfig(Settings settings) { - maxThreadCount = settings.getAsInt(MAX_THREAD_COUNT, Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(settings) / 2))); - maxMergeCount = settings.getAsInt(MAX_MERGE_COUNT, maxThreadCount + 5); - this.autoThrottle = settings.getAsBoolean(AUTO_THROTTLE, true); + MergeSchedulerConfig(IndexSettings indexSettings) { + indexSettings.getScopedSettings().addSettingsUpdateConsumer(MAX_THREAD_COUNT_SETTING, this::setMaxThreadCount); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(MAX_MERGE_COUNT_SETTING, this::setMaxMergeCount); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(AUTO_THROTTLE_SETTING, this::setAutoThrottle); + maxThreadCount = indexSettings.getValue(MAX_THREAD_COUNT_SETTING); + maxMergeCount = indexSettings.getValue(MAX_MERGE_COUNT_SETTING); + this.autoThrottle = indexSettings.getValue(AUTO_THROTTLE_SETTING); } /** diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java new file mode 100644 index 00000000000..200aa1cc29a --- /dev/null +++ 
b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -0,0 +1,238 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.internal.SearchContext; + +import java.util.concurrent.TimeUnit; + +/** + */ +public final class SearchSlowLog { + + private boolean reformat; + + private long queryWarnThreshold; + private long queryInfoThreshold; + private long queryDebugThreshold; + private long queryTraceThreshold; + + private long fetchWarnThreshold; + private long fetchInfoThreshold; + private long fetchDebugThreshold; + private long fetchTraceThreshold; + + private SlowLogLevel level; + + private final ESLogger queryLogger; + private final ESLogger fetchLogger; + + private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); 
+ public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_REFORMAT = Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_LEVEL = new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, Setting.Scope.INDEX); + + public SearchSlowLog(IndexSettings indexSettings) { + + this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); + this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_REFORMAT, this::setReformat); + this.reformat = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_REFORMAT); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, this::setQueryWarnThreshold); + this.queryWarnThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING, this::setQueryInfoThreshold); + this.queryInfoThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING, this::setQueryDebugThreshold); + this.queryDebugThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING, this::setQueryTraceThreshold); + this.queryTraceThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING).nanos(); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, this::setFetchWarnThreshold); + this.fetchWarnThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, this::setFetchInfoThreshold); + this.fetchInfoThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, this::setFetchDebugThreshold); + this.fetchDebugThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING, this::setFetchTraceThreshold); + this.fetchTraceThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING).nanos(); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_LEVEL, this::setLevel); + setLevel(indexSettings.getValue(INDEX_SEARCH_SLOWLOG_LEVEL)); + } + + private void setLevel(SlowLogLevel level) { + this.level = level; + this.queryLogger.setLevel(level.name()); + this.fetchLogger.setLevel(level.name()); + } + + public void onQueryPhase(SearchContext context, long tookInNanos) { + if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) { + 
queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) { + queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) { + queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) { + queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } + } + + public void onFetchPhase(SearchContext context, long tookInNanos) { + if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) { + fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) { + fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) { + fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) { + fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } + } + + private static class SlowLogSearchContextPrinter { + private final SearchContext context; + private final long tookInNanos; + private final boolean reformat; + + public SlowLogSearchContextPrinter(SearchContext context, long tookInNanos, boolean reformat) { + this.context = context; + this.tookInNanos = tookInNanos; + this.reformat = reformat; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); + if (context.types() == null) { + sb.append("types[], "); + } else { + sb.append("types["); + Strings.arrayToDelimitedString(context.types(), ",", sb); + sb.append("], "); + } + if (context.groupStats() == null) { + sb.append("stats[], "); + } else { + sb.append("stats["); + Strings.collectionToDelimitedString(context.groupStats(), ",", "", "", sb); + sb.append("], "); + } + sb.append("search_type[").append(context.searchType()).append("], total_shards[").append(context.numberOfShards()).append("], "); + if (context.request().source() != null) { + sb.append("source[").append(context.request().source()).append("], "); + } else { + sb.append("source[], "); + } + return sb.toString(); + } + } + + private void setReformat(boolean reformat) { + this.reformat = reformat; + } + + private void setQueryWarnThreshold(TimeValue warnThreshold) { + this.queryWarnThreshold = warnThreshold.nanos(); + } + + private void setQueryInfoThreshold(TimeValue infoThreshold) { + this.queryInfoThreshold = infoThreshold.nanos(); + } + + private void setQueryDebugThreshold(TimeValue debugThreshold) { + this.queryDebugThreshold = debugThreshold.nanos(); + } + + private void setQueryTraceThreshold(TimeValue traceThreshold) { + this.queryTraceThreshold = traceThreshold.nanos(); + } + + private void setFetchWarnThreshold(TimeValue warnThreshold) { + this.fetchWarnThreshold = warnThreshold.nanos(); + } + + private void setFetchInfoThreshold(TimeValue infoThreshold) { + this.fetchInfoThreshold = infoThreshold.nanos(); + } + + private void setFetchDebugThreshold(TimeValue debugThreshold) { + 
this.fetchDebugThreshold = debugThreshold.nanos(); + } + + private void setFetchTraceThreshold(TimeValue traceThreshold) { + this.fetchTraceThreshold = traceThreshold.nanos(); + } + + boolean isReformat() { + return reformat; + } + + long getQueryWarnThreshold() { + return queryWarnThreshold; + } + + long getQueryInfoThreshold() { + return queryInfoThreshold; + } + + long getQueryDebugThreshold() { + return queryDebugThreshold; + } + + long getQueryTraceThreshold() { + return queryTraceThreshold; + } + + long getFetchWarnThreshold() { + return fetchWarnThreshold; + } + + long getFetchInfoThreshold() { + return fetchInfoThreshold; + } + + long getFetchDebugThreshold() { + return fetchDebugThreshold; + } + + long getFetchTraceThreshold() { + return fetchTraceThreshold; + } + + SlowLogLevel getLevel() { + return level; + } +} diff --git a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogOpsRecoveryIT.java b/core/src/main/java/org/elasticsearch/index/SlowLogLevel.java similarity index 71% rename from core/src/test/java/org/elasticsearch/recovery/SmallTranslogOpsRecoveryIT.java rename to core/src/main/java/org/elasticsearch/index/SlowLogLevel.java index e716736252c..c5b65ebb8a9 100644 --- a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogOpsRecoveryIT.java +++ b/core/src/main/java/org/elasticsearch/index/SlowLogLevel.java @@ -16,18 +16,13 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.index; -package org.elasticsearch.recovery; +import java.util.Locale; -import org.elasticsearch.common.settings.Settings; - -/** - * - */ -public class SmallTranslogOpsRecoveryIT extends SimpleRecoveryIT { - - @Override - protected Settings recoverySettings() { - return Settings.settingsBuilder().put("index.shard.recovery.translog_ops", 1).build(); +public enum SlowLogLevel { + WARN, TRACE, INFO, DEBUG; + public static SlowLogLevel parse(String level) { + return valueOf(level.toUpperCase(Locale.ROOT)); } } diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 1de139aa695..f18cc631aea 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -69,7 +70,7 @@ import java.util.concurrent.Executor; */ public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { - public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly"; + public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, false, Setting.Scope.INDEX); private final boolean loadRandomAccessFiltersEagerly; private final Cache> loadedFilters; @@ -82,14 +83,14 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L if (listener == null) { throw new 
IllegalArgumentException("listener must not be null"); } - this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true); + this.loadRandomAccessFiltersEagerly = this.indexSettings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); this.loadedFilters = CacheBuilder.>builder().removalListener(this).build(); this.warmer = new BitSetProducerWarmer(); this.indicesWarmer = indicesWarmer; indicesWarmer.addListener(warmer); this.listener = listener; } - + public BitSetProducer getBitSetProducer(Query query) { diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 0e112118da8..79610906b14 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1065,7 +1065,7 @@ public abstract class Engine implements Closeable { } } - public static class CommitId implements Writeable { + public static class CommitId implements Writeable { private final byte[] id; diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 7d68eecf8a3..221df42b055 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -38,6 +39,8 @@ import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Set; + /* * Holds all the configuration that is used to create an {@link Engine}. * Once {@link Engine} has been created with this object, changes to this @@ -64,16 +67,30 @@ public final class EngineConfig { private final QueryCache queryCache; private final QueryCachingPolicy queryCachingPolicy; + static { + + } /** * Index setting to change the low level lucene codec used for writing new segments. * This setting is not realtime updateable. 
*/ - public static final String INDEX_CODEC_SETTING = "index.codec"; + public static final Setting INDEX_CODEC_SETTING = new Setting<>("index.codec", "default", (s) -> { + switch(s) { + case "default": + case "best_compression": + case "lucene_default": + return s; + default: + if (Codec.availableCodecs().contains(s) == false) { // don't advertise codecs that are not officially supported in the error message + throw new IllegalArgumentException("unknown value for [index.codec] must be one of [default, best_compression] but was: " + s); + } + return s; + } + }, false, Setting.Scope.INDEX); /** if set to true the engine will start even if the translog id in the commit point can not be found */ public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog"; - private static final String DEFAULT_CODEC_NAME = "default"; private TranslogConfig translogConfig; private boolean create = false; @@ -97,7 +114,7 @@ public final class EngineConfig { this.similarity = similarity; this.codecService = codecService; this.eventListener = eventListener; - codecName = settings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME); + codecName = indexSettings.getValue(INDEX_CODEC_SETTING); // We give IndexWriter a "huge" (256 MB) buffer, so it won't flush on its own unless the ES indexing buffer is also huge and/or // there are not too many shards allocated to this node. Instead, IndexingMemoryController periodically checks // and refreshes the most heap-consuming shards when total indexing heap usage across all shards is too high: diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 74cac49b76d..d9ee2f4177a 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -231,7 +231,8 @@ public class InternalEngine extends Engine { protected void recoverFromTranslog(EngineConfig engineConfig, Translog.TranslogGeneration translogGeneration) throws IOException { int opsRecovered = 0; final TranslogRecoveryPerformer handler = engineConfig.getTranslogRecoveryPerformer(); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { + try { + Translog.Snapshot snapshot = translog.newSnapshot(); Translog.Operation operation; while ((operation = snapshot.next()) != null) { try { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 8ac0bda2f0b..b898f3ffd2a 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.Accountable; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; @@ -55,9 +56,17 @@ import static java.util.Collections.unmodifiableMap; /** */ public class IndexFieldDataService extends AbstractIndexComponent implements Closeable { - - public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache"; public static final String FIELDDATA_CACHE_VALUE_NODE = "node"; +
public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache"; + public static final Setting INDEX_FIELDDATA_CACHE_KEY = new Setting<>(FIELDDATA_CACHE_KEY, (s) -> FIELDDATA_CACHE_VALUE_NODE, (s) -> { + switch (s) { + case "node": + case "none": + return s; + default: + throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node, none]"); + } + }, false, Setting.Scope.INDEX); private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> { throw new IllegalStateException("Can't load fielddata on [" + fieldType.name() @@ -228,7 +237,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo if (cache == null) { // we default to node level cache, which in turn defaults to be unbounded // this means changing the node level settings is simple, just set the bounds there - String cacheType = type.getSettings().get("cache", indexSettings.getSettings().get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE)); + String cacheType = type.getSettings().get("cache", indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY)); if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) { cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type); } else if ("none".equals(cacheType)){ diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index c2d644d393d..0528541238a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -79,10 +79,20 @@ public class DocumentMapper implements ToXContent { this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1)); this.rootObjectMapper = builder.build(builderContext); + final String type = rootObjectMapper.name(); + DocumentMapper existingMapper = mapperService.documentMapper(type); for (Map.Entry entry : mapperService.mapperRegistry.getMetadataMapperParsers().entrySet()) { final String name = entry.getKey(); - final TypeParser parser = entry.getValue(); - final MetadataFieldMapper metadataMapper = parser.getDefault(indexSettings, mapperService.fullName(name), builder.name()); + final MetadataFieldMapper existingMetadataMapper = existingMapper == null + ?
null + : (MetadataFieldMapper) existingMapper.mappers().getMapper(name); + final MetadataFieldMapper metadataMapper; + if (existingMetadataMapper == null) { + final TypeParser parser = entry.getValue(); + metadataMapper = parser.getDefault(indexSettings, mapperService.fullName(name), builder.name()); + } else { + metadataMapper = existingMetadataMapper; + } metadataMappers.put(metadataMapper.getClass(), metadataMapper); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 23d8cd56034..3eca73cffe5 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -47,7 +48,8 @@ import java.util.Map; import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { - + public static final Setting IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", false, false, Setting.Scope.INDEX); + public static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", false, false, Setting.Scope.INDEX); public abstract static class Builder extends Mapper.Builder { protected final MappedFieldType fieldType; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 25796c605f7..999eeb2edf9 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; + import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.index.IndexOptions; @@ -33,11 +34,11 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -59,7 +60,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -78,9 +78,26 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; */ public class MapperService extends AbstractIndexComponent implements Closeable { + /** + * The reason why a mapping is being merged. + */ + public enum MergeReason { + /** + * Create or update a mapping. 
+ */ + MAPPING_UPDATE, + /** + * Recovery of an existing mapping, for instance because of a restart, + * if a shard was moved to a different node or for administrative + * purposes. + */ + MAPPING_RECOVERY; + } + public static final String DEFAULT_MAPPING = "_default_"; - public static final String INDEX_MAPPER_DYNAMIC_SETTING = "index.mapper.dynamic"; + public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.nested_fields.limit", 50L, 0, true, Setting.Scope.INDEX); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; + public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, false, Setting.Scope.INDEX); private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" @@ -128,7 +145,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer()); this.mapperRegistry = mapperRegistry; - this.dynamic = this.indexSettings.getSettings().getAsBoolean(INDEX_MAPPER_DYNAMIC_SETTING, INDEX_MAPPER_DYNAMIC_DEFAULT); + this.dynamic = this.indexSettings.getValue(INDEX_MAPPER_DYNAMIC_SETTING); defaultPercolatorMappingSource = "{\n" + "\"_default_\":{\n" + "\"properties\" : {\n" + @@ -203,7 +220,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { typeListeners.remove(listener); } - public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) { + public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason, boolean updateAllTypes) { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it // NOTE: never apply the default here @@ -221,14 +238,18 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return mapper; } else { synchronized (this) { - // only apply the default mapping if we don't have the type yet - applyDefault &= mappers.containsKey(type) == false; - return merge(parse(type, mappingSource, applyDefault), updateAllTypes); + final boolean applyDefault = + // the default was already applied if we are recovering + reason != MergeReason.MAPPING_RECOVERY + // only apply the default mapping if we don't have the type yet + && mappers.containsKey(type) == false; + DocumentMapper mergeWith = parse(type, mappingSource, applyDefault); + return merge(mergeWith, reason, updateAllTypes); } } } - private synchronized DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { + private synchronized DocumentMapper merge(DocumentMapper mapper, MergeReason reason, boolean updateAllTypes) { if (mapper.type().length() == 0) { throw new InvalidTypeNameException("mapping type name is empty"); } @@ -281,6 +302,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); + + if (reason == MergeReason.MAPPING_UPDATE) { + checkNestedFieldsLimit(fullPathObjectMappers); + } + Set parentTypes = this.parentTypes; if (oldMapper == null && newMapper.parentFieldMapper().active()) { parentTypes = new HashSet<>(parentTypes.size() + 1); @@ -393,6 +419,19 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } + private void checkNestedFieldsLimit(Map
fullPathObjectMappers) { + long allowedNestedFields = indexSettings.getValue(INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING); + long actualNestedFields = 0; + for (ObjectMapper objectMapper : fullPathObjectMappers.values()) { + if (objectMapper.nested().isNested()) { + actualNestedFields++; + } + } + if (allowedNestedFields >= 0 && actualNestedFields > allowedNestedFields) { + throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().name() + "] has been exceeded"); + } + } + public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { String defaultMappingSource; if (PercolatorService.TYPE_NAME.equals(mappingType)) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index a0a5e5e5bce..5f829fdecea 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -51,6 +52,7 @@ import java.util.List; * */ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { + private static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", true, false, Setting.Scope.INDEX); // this is private since it has a different default public static class Defaults { @@ -89,7 +91,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM return new Explicit<>(ignoreMalformed, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false); + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); } return Defaults.IGNORE_MALFORMED; } @@ -104,7 +106,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM return new Explicit<>(coerce, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); } return Defaults.COERCE; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 52202fac716..29a2aca7bec 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ 
-59,7 +60,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; */ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser { public static final String CONTENT_TYPE = "geo_point"; - public static class Names { public static final String LAT = "lat"; public static final String LAT_SUFFIX = "." + LAT; @@ -142,7 +142,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr return new Explicit<>(ignoreMalformed, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false); + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); } return Defaults.IGNORE_MALFORMED; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 8c954c06a5e..c008be6f673 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -102,7 +102,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement return new Explicit<>(coerce, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); } return Defaults.COERCE; } @@ -364,4 +364,4 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 2ea59393ca0..0de2cd2b60b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -137,7 +137,7 @@ public class GeoShapeFieldMapper extends FieldMapper { return new Explicit<>(coerce, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); } return Defaults.COERCE; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 9984463ffc0..c83428d2239 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -213,11 +213,24 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { if (value != null) { - long[] fromTo; + String term; if (value instanceof BytesRef) { - fromTo = Cidrs.cidrMaskToMinMax(((BytesRef) value).utf8ToString()); + term = ((BytesRef) value).utf8ToString(); } else { - fromTo = Cidrs.cidrMaskToMinMax(value.toString()); + term = value.toString(); + } + long[] fromTo; + // assume that the term is either a CIDR range or the + // term is a single IPv4 address; if either of these + // assumptions is wrong, the CIDR parsing will fail + // anyway, and that is okay + 
if (term.contains("/")) { + // treat the term as if it is in CIDR notation + fromTo = Cidrs.cidrMaskToMinMax(term); + } else { + // treat the term as if it is a single IPv4, and + // apply a CIDR mask equivalent to the host route + fromTo = Cidrs.cidrMaskToMinMax(term + "/32"); } if (fromTo != null) { return rangeQuery(fromTo[0] == 0 ? null : fromTo[0], diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index 9a57ea57764..c4b2b06e0e0 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -119,7 +119,7 @@ public class PercolatorFieldMapper extends FieldMapper { this.queryShardContext = queryShardContext; this.queryTermsField = queryTermsField; this.unknownQueryField = unknownQueryField; - this.mapUnmappedFieldAsString = indexSettings.getAsBoolean(PercolatorQueriesRegistry.MAP_UNMAPPED_FIELDS_AS_STRING, false); + this.mapUnmappedFieldAsString = PercolatorQueriesRegistry.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 143616b7084..f55a739caf3 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -60,7 +61,7 @@ import java.util.concurrent.TimeUnit; */ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { - public final static String MAP_UNMAPPED_FIELDS_AS_STRING = "index.percolator.map_unmapped_fields_as_string"; + public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, false, Setting.Scope.INDEX); private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); private final QueryShardContext queryShardContext; @@ -72,7 +73,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings, QueryShardContext queryShardContext) { super(shardId, indexSettings); this.queryShardContext = queryShardContext; - this.mapUnmappedFieldsAsString = this.indexSettings.getSettings().getAsBoolean(MAP_UNMAPPED_FIELDS_AS_STRING, false); + this.mapUnmappedFieldsAsString = indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); } public ConcurrentMap getPercolateQueries() { diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 79ba3804ecd..d2116ae3c05 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -42,7 +42,7 @@ import java.util.Objects; * Base class for all classes producing lucene queries. * Supports conversion to BytesReference and creation of lucene Query objects. */ -public abstract class AbstractQueryBuilder extends ToXContentToBytes implements QueryBuilder { +public abstract class AbstractQueryBuilder> extends ToXContentToBytes implements QueryBuilder { /** Default for boost to apply to resulting Lucene query. Defaults to 1.0*/ public static final float DEFAULT_BOOST = 1.0f; @@ -225,10 +225,10 @@ public abstract class AbstractQueryBuilder exte * their {@link QueryBuilder#toQuery(QueryShardContext)} method are not added to the * resulting collection. */ - protected static Collection toQueries(Collection queryBuilders, QueryShardContext context) throws QueryShardException, + protected static Collection toQueries(Collection> queryBuilders, QueryShardContext context) throws QueryShardException, IOException { List queries = new ArrayList<>(queryBuilders.size()); - for (QueryBuilder queryBuilder : queryBuilders) { + for (QueryBuilder queryBuilder : queryBuilders) { Query query = queryBuilder.toQuery(context); if (query != null) { queries.add(query); @@ -243,15 +243,15 @@ public abstract class AbstractQueryBuilder exte return getWriteableName(); } - protected final void writeQueries(StreamOutput out, List queries) throws IOException { + protected final void writeQueries(StreamOutput out, List> queries) throws IOException { out.writeVInt(queries.size()); - for (QueryBuilder query : queries) { + for (QueryBuilder query : queries) { out.writeQuery(query); } } - protected final List readQueries(StreamInput in) throws IOException { - List queries = new ArrayList<>(); + protected final List> readQueries(StreamInput in) throws IOException { + List> queries = new ArrayList<>(); int size = in.readVInt(); for (int i = 0; i < size; i++) { queries.add(in.readQuery()); diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index 69ee2a81061..f7f4926d950 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -49,13 +49,13 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { static final BoolQueryBuilder PROTOTYPE = new BoolQueryBuilder(); - private final List mustClauses = new ArrayList<>(); + private final List> mustClauses = new ArrayList<>(); - private final List mustNotClauses = new ArrayList<>(); + private final List> mustNotClauses = new ArrayList<>(); - private final List filterClauses = new ArrayList<>(); + private final List> filterClauses = new ArrayList<>(); - private final List shouldClauses = new ArrayList<>(); + private final List> shouldClauses = new ArrayList<>(); private boolean disableCoord = DISABLE_COORD_DEFAULT; @@ -67,7 +67,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must appear in the matching documents and will * contribute to scoring. No null value allowed. */ - public BoolQueryBuilder must(QueryBuilder queryBuilder) { + public BoolQueryBuilder must(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -78,7 +78,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { /** * Gets the queries that must appear in the matching documents. 
*/ - public List<QueryBuilder> must() { + public List<QueryBuilder<?>> must() { return this.mustClauses; } @@ -86,7 +86,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { * Adds a query that must appear in the matching documents but will * not contribute to scoring. No null value allowed. */ - public BoolQueryBuilder filter(QueryBuilder queryBuilder) { + public BoolQueryBuilder filter(QueryBuilder<?> queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -95,9 +95,9 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { } /** - * Gets the queries that must appear in the matching documents but don't conntribute to scoring + * Gets the queries that must appear in the matching documents but don't contribute to scoring */ - public List<QueryBuilder> filter() { + public List<QueryBuilder<?>> filter() { return this.filterClauses; } @@ -105,7 +105,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { * Adds a query that must not appear in the matching documents. * No null value allowed. */ - public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) { + public BoolQueryBuilder mustNot(QueryBuilder<?> queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -116,7 +116,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { /** * Gets the queries that must not appear in the matching documents. */ - public List<QueryBuilder> mustNot() { + public List<QueryBuilder<?>> mustNot() { return this.mustNotClauses; } @@ -127,7 +127,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { * * @see #minimumNumberShouldMatch(int) */ - public BoolQueryBuilder should(QueryBuilder queryBuilder) { + public BoolQueryBuilder should(QueryBuilder<?> queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -141,7 +141,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { * @see #should(QueryBuilder) * @see #minimumNumberShouldMatch(int) */ - public List<QueryBuilder> should() { + public List<QueryBuilder<?>> should() { return this.shouldClauses; } @@ -244,12 +244,12 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { builder.endObject(); } - private static void doXArrayContent(String field, List<QueryBuilder> clauses, XContentBuilder builder, Params params) throws IOException { + private static void doXArrayContent(String field, List<QueryBuilder<?>> clauses, XContentBuilder builder, Params params) throws IOException { if (clauses.isEmpty()) { return; } builder.startArray(field); - for (QueryBuilder clause : clauses) { + for (QueryBuilder<?> clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -282,8 +282,8 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { return adjustPureNegative ?
fixNegativeQueryIfNeeded(query) : query; } - private static void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List<QueryBuilder> clauses, Occur occurs) throws IOException { - for (QueryBuilder query : clauses) { + private static void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List<QueryBuilder<?>> clauses, Occur occurs) throws IOException { + for (QueryBuilder<?> query : clauses) { Query luceneQuery = null; switch (occurs) { case MUST: @@ -321,7 +321,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> { @Override protected BoolQueryBuilder doReadFrom(StreamInput in) throws IOException { BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); - List<QueryBuilder> queryBuilders = readQueries(in); + List<QueryBuilder<?>> queryBuilders = readQueries(in); boolQueryBuilder.mustClauses.addAll(queryBuilders); queryBuilders = readQueries(in); boolQueryBuilder.mustNotClauses.addAll(queryBuilders); diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java index d0d130f9774..53c1b81989f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java @@ -19,17 +19,14 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanQuery; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentParser; - import java.io.IOException; import java.util.ArrayList; import java.util.List; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; + /** * Parser for bool query */ @@ -45,11 +42,6 @@ public class BoolQueryParser implements QueryParser<BoolQueryBuilder> { public static final ParseField MINIMUM_NUMBER_SHOULD_MATCH = new ParseField("minimum_number_should_match"); public static final ParseField ADJUST_PURE_NEGATIVE = new ParseField("adjust_pure_negative"); - @Inject - public BoolQueryParser(Settings settings) { - BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count", settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount()))); - } - @Override public String[] names() { return new String[]{BoolQueryBuilder.NAME}; diff --git a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java index 43b1917d318..e682ae2c1c0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java @@ -40,7 +40,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder> public static final String NAME = "dis_max"; - private final ArrayList<QueryBuilder> queries = new ArrayList<>(); + private final List<QueryBuilder<?>> queries = new ArrayList<>(); /** Default multiplication factor for breaking ties in document scores.*/ public static float DEFAULT_TIE_BREAKER = 0.0f; @@ -51,7 +51,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder> /** * Add a sub-query to this disjunction.
*/ - public DisMaxQueryBuilder add(QueryBuilder queryBuilder) { + public DisMaxQueryBuilder add(QueryBuilder<?> queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner dismax query clause cannot be null"); } @@ -62,7 +62,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder> /** * @return an immutable list copy of the current sub-queries of this disjunction */ - public List<QueryBuilder> innerQueries() { + public List<QueryBuilder<?>> innerQueries() { return this.queries; } @@ -90,7 +90,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder> builder.startObject(NAME); builder.field(DisMaxQueryParser.TIE_BREAKER_FIELD.getPreferredName(), tieBreaker); builder.startArray(DisMaxQueryParser.QUERIES_FIELD.getPreferredName()); - for (QueryBuilder queryBuilder : queries) { + for (QueryBuilder<?> queryBuilder : queries) { queryBuilder.toXContent(builder, params); } builder.endArray(); @@ -112,7 +112,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder<DisMaxQueryBuilder> @Override protected DisMaxQueryBuilder doReadFrom(StreamInput in) throws IOException { DisMaxQueryBuilder disMax = new DisMaxQueryBuilder(); - List<QueryBuilder> queryBuilders = readQueries(in); + List<QueryBuilder<?>> queryBuilders = readQueries(in); disMax.queries.addAll(queryBuilders); disMax.tieBreaker = in.readFloat(); return disMax; diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java index 7cdb66bd126..ba5d7c2447e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java @@ -19,15 +19,14 @@ package org.elasticsearch.index.query; +import java.io.IOException; + import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.support.QueryInnerHits; -import java.io.IOException; - public class NestedQueryParser implements QueryParser<NestedQueryBuilder> { private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE); @@ -38,7 +37,7 @@ public class NestedQueryParser implements QueryParser<NestedQueryBuilder> { @Override public String[] names() { - return new String[]{NestedQueryBuilder.NAME, Strings.toCamelCase(NestedQueryBuilder.NAME)}; + return new String[]{NestedQueryBuilder.NAME}; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java index 2fde316a561..b75406c8641 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import java.io.IOException; -public interface QueryBuilder<QB extends QueryBuilder> extends NamedWriteable<QB>, ToXContent { +public interface QueryBuilder<QB extends QueryBuilder<QB>> extends NamedWriteable<QB>, ToXContent { /** * Converts this QueryBuilder to a lucene {@link Query}.
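The recurring change across these query files is the move from raw `QueryBuilder` references to a self-referential type bound (`QueryBuilder<QB extends QueryBuilder<QB>>`), with `QueryBuilder<?>` wildcards wherever a collection mixes clause types. A minimal stand-alone sketch of that pattern; all names here are illustrative stand-ins, not the actual Elasticsearch types:

```java
import java.util.ArrayList;
import java.util.List;

// Self-referential ("curiously recurring") generic bound, as applied to QueryBuilder above.
interface Builder<B extends Builder<B>> {
    B boost(float boost); // fluent setters declared on the interface return the concrete subtype B
}

class TermBuilder implements Builder<TermBuilder> {
    float boost;
    @Override
    public TermBuilder boost(float boost) { this.boost = boost; return this; }
}

class BoolBuilder implements Builder<BoolBuilder> {
    // a bool-style query mixes clause types, so the clause list needs a wildcard
    private final List<Builder<?>> mustClauses = new ArrayList<>();
    float boost;
    public BoolBuilder must(Builder<?> clause) { mustClauses.add(clause); return this; }
    @Override
    public BoolBuilder boost(float boost) { this.boost = boost; return this; }
}

public class Demo {
    public static void main(String[] args) {
        // chaining keeps the concrete type: no cast needed after boost()
        BoolBuilder q = new BoolBuilder().must(new TermBuilder().boost(2.0f)).boost(1.5f);
        System.out.println(q.boost); // 1.5
    }
}
```

The bound buys type-safe fluent chaining and a typed `NamedWriteable<QB>`, while the `<?>` wildcard is the honest type for heterogeneous clause lists, which is exactly the split the hunks above make.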
diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java index 80eb94f4276..5f30ac28e7f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanNearQueryBuilder.java @@ -43,7 +43,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses = new ArrayList<>(); + private final List> clauses = new ArrayList<>(); private final int slop; @@ -55,7 +55,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder initialClause, int slop) { if (initialClause == null) { throw new IllegalArgumentException("query must include at least one clause"); } @@ -70,7 +70,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clause) { if (clause == null) { throw new IllegalArgumentException("query clauses cannot be null"); } @@ -81,7 +81,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses() { + public List> clauses() { return this.clauses; } @@ -106,7 +106,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -129,10 +129,10 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses = readQueries(in); - SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder((SpanQueryBuilder)clauses.get(0), in.readVInt()); + List> clauses = readQueries(in); + SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder((SpanQueryBuilder)clauses.get(0), in.readVInt()); for (int i = 1; i < clauses.size(); i++) { - queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); + queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); } queryBuilder.inOrder = in.readBoolean(); return queryBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java index 3b8681c685b..8d1fb4f7ff3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java @@ -38,18 +38,18 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder public static final String NAME = "span_or"; - private final List clauses = new ArrayList<>(); + private final List> clauses = new ArrayList<>(); static final SpanOrQueryBuilder PROTOTYPE = new SpanOrQueryBuilder(SpanTermQueryBuilder.PROTOTYPE); - public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { + public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { if (initialClause == null) { throw new IllegalArgumentException("query must include at least one clause"); } clauses.add(initialClause); } - public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { + public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { if (clause == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -60,7 +60,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder /** * @return the {@link SpanQueryBuilder} clauses that were set for this query */ - public List clauses() { + public List> clauses() { return this.clauses; } @@ -68,7 +68,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); builder.startArray(SpanOrQueryParser.CLAUSES_FIELD.getPreferredName()); - for 
(SpanQueryBuilder clause : clauses) { + for (SpanQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -89,10 +89,10 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder @Override protected SpanOrQueryBuilder doReadFrom(StreamInput in) throws IOException { - List clauses = readQueries(in); - SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder((SpanQueryBuilder)clauses.get(0)); + List> clauses = readQueries(in); + SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder((SpanQueryBuilder)clauses.get(0)); for (int i = 1; i < clauses.size(); i++) { - queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); + queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); } return queryBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java index d35dcbc536a..90a75a5af1b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java @@ -22,6 +22,6 @@ package org.elasticsearch.index.query; /** * Marker interface for a specific type of {@link QueryBuilder} that allows to build span queries */ -public interface SpanQueryBuilder extends QueryBuilder { +public interface SpanQueryBuilder> extends QueryBuilder { } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java index 6822ab3e240..8e503e24d3c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java @@ -19,10 +19,13 @@ package org.elasticsearch.index.query.functionscore; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; @@ -36,10 +39,6 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - /** * Parser for function_score query */ @@ -54,7 +53,6 @@ public class FunctionScoreQueryParser implements QueryParser> functionParsers; - @Inject - public ScoreFunctionParserMapper(Set parsers, NamedWriteableRegistry namedWriteableRegistry) { - Map> map = new HashMap<>(); - // built-in parsers - addParser(new ScriptScoreFunctionParser(), map, namedWriteableRegistry); - addParser(new GaussDecayFunctionParser(), map, namedWriteableRegistry); - addParser(new LinearDecayFunctionParser(), map, namedWriteableRegistry); - addParser(new ExponentialDecayFunctionParser(), map, namedWriteableRegistry); - addParser(new RandomScoreFunctionParser(), map, namedWriteableRegistry); - addParser(new FieldValueFactorFunctionParser(), map, namedWriteableRegistry); - for (ScoreFunctionParser scoreFunctionParser : parsers) { - addParser(scoreFunctionParser, map, namedWriteableRegistry); - } 
- this.functionParsers = Collections.unmodifiableMap(map); - //weight doesn't have its own parser, so every function supports it out of the box. - //Can be a single function too when not associated to any other function, which is why it needs to be registered manually here. - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, new WeightBuilder()); + public ScoreFunctionParserMapper(Map> functionParsers) { + this.functionParsers = unmodifiableMap(functionParsers); } - public ScoreFunctionParser get(XContentLocation contentLocation, String parserName) { - ScoreFunctionParser functionParser = get(parserName); + public ScoreFunctionParser get(XContentLocation contentLocation, String parserName) { + ScoreFunctionParser functionParser = get(parserName); if (functionParser == null) { throw new ParsingException(contentLocation, "No function with the name [" + parserName + "] is registered."); } return functionParser; } - private ScoreFunctionParser get(String parserName) { + private ScoreFunctionParser get(String parserName) { return functionParsers.get(parserName); } - - private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { - for (String name : scoreFunctionParser.getNames()) { - map.put(name, scoreFunctionParser); - - } - @SuppressWarnings("unchecked") NamedWriteable sfb = scoreFunctionParser.getBuilderPrototype(); - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, sfb); - } } diff --git a/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java deleted file mode 100644 index f3089b9a5f7..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.stats; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.internal.SearchContext; - -import java.util.Locale; -import java.util.concurrent.TimeUnit; - -/** - */ -public final class SearchSlowLog { - - private boolean reformat; - - private long queryWarnThreshold; - private long queryInfoThreshold; - private long queryDebugThreshold; - private long queryTraceThreshold; - - private long fetchWarnThreshold; - private long fetchInfoThreshold; - private long fetchDebugThreshold; - private long fetchTraceThreshold; - - private String level; - - private final ESLogger queryLogger; - private final ESLogger fetchLogger; - - private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace"; - public static final String INDEX_SEARCH_SLOWLOG_REFORMAT = INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat"; - public static final String INDEX_SEARCH_SLOWLOG_LEVEL = INDEX_SEARCH_SLOWLOG_PREFIX + ".level"; - - public SearchSlowLog(Settings indexSettings) { - - this.reformat = indexSettings.getAsBoolean(INDEX_SEARCH_SLOWLOG_REFORMAT, true); - - this.queryWarnThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, TimeValue.timeValueNanos(-1)).nanos(); - this.queryInfoThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, TimeValue.timeValueNanos(-1)).nanos(); - this.queryDebugThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, TimeValue.timeValueNanos(-1)).nanos(); - this.queryTraceThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, TimeValue.timeValueNanos(-1)).nanos(); - - this.fetchWarnThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, TimeValue.timeValueNanos(-1)).nanos(); - this.fetchInfoThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, TimeValue.timeValueNanos(-1)).nanos(); - this.fetchDebugThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, TimeValue.timeValueNanos(-1)).nanos(); - this.fetchTraceThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, TimeValue.timeValueNanos(-1)).nanos(); - - this.level = indexSettings.get(INDEX_SEARCH_SLOWLOG_LEVEL, "TRACE").toUpperCase(Locale.ROOT); - - this.queryLogger = 
Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); - this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); - - queryLogger.setLevel(level); - fetchLogger.setLevel(level); - } - - void onQueryPhase(SearchContext context, long tookInNanos) { - if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) { - queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) { - queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) { - queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) { - queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } - } - - void onFetchPhase(SearchContext context, long tookInNanos) { - if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) { - fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) { - fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) { - fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) { - fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } - } - - public void onRefreshSettings(Settings settings) { - long queryWarnThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, TimeValue.timeValueNanos(this.queryWarnThreshold)).nanos(); - if (queryWarnThreshold != this.queryWarnThreshold) { - this.queryWarnThreshold = queryWarnThreshold; - } - long queryInfoThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, TimeValue.timeValueNanos(this.queryInfoThreshold)).nanos(); - if (queryInfoThreshold != this.queryInfoThreshold) { - this.queryInfoThreshold = queryInfoThreshold; - } - long queryDebugThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, TimeValue.timeValueNanos(this.queryDebugThreshold)).nanos(); - if (queryDebugThreshold != this.queryDebugThreshold) { - this.queryDebugThreshold = queryDebugThreshold; - } - long queryTraceThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, TimeValue.timeValueNanos(this.queryTraceThreshold)).nanos(); - if (queryTraceThreshold != this.queryTraceThreshold) { - this.queryTraceThreshold = queryTraceThreshold; - } - - long fetchWarnThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, TimeValue.timeValueNanos(this.fetchWarnThreshold)).nanos(); - if (fetchWarnThreshold != this.fetchWarnThreshold) { - this.fetchWarnThreshold = fetchWarnThreshold; - } - long fetchInfoThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, TimeValue.timeValueNanos(this.fetchInfoThreshold)).nanos(); - if (fetchInfoThreshold != this.fetchInfoThreshold) { - this.fetchInfoThreshold = fetchInfoThreshold; - } - long fetchDebugThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, TimeValue.timeValueNanos(this.fetchDebugThreshold)).nanos(); - if (fetchDebugThreshold != this.fetchDebugThreshold) { - this.fetchDebugThreshold = 
fetchDebugThreshold; - } - long fetchTraceThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, TimeValue.timeValueNanos(this.fetchTraceThreshold)).nanos(); - if (fetchTraceThreshold != this.fetchTraceThreshold) { - this.fetchTraceThreshold = fetchTraceThreshold; - } - - String level = settings.get(INDEX_SEARCH_SLOWLOG_LEVEL, this.level); - if (!level.equals(this.level)) { - this.queryLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.fetchLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.level = level; - } - - boolean reformat = settings.getAsBoolean(INDEX_SEARCH_SLOWLOG_REFORMAT, this.reformat); - if (reformat != this.reformat) { - this.reformat = reformat; - } - } - - private static class SlowLogSearchContextPrinter { - private final SearchContext context; - private final long tookInNanos; - private final boolean reformat; - - public SlowLogSearchContextPrinter(SearchContext context, long tookInNanos, boolean reformat) { - this.context = context; - this.tookInNanos = tookInNanos; - this.reformat = reformat; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); - if (context.types() == null) { - sb.append("types[], "); - } else { - sb.append("types["); - Strings.arrayToDelimitedString(context.types(), ",", sb); - sb.append("], "); - } - if (context.groupStats() == null) { - sb.append("stats[], "); - } else { - sb.append("stats["); - Strings.collectionToDelimitedString(context.groupStats(), ",", "", "", sb); - sb.append("], "); - } - sb.append("search_type[").append(context.searchType()).append("], total_shards[").append(context.numberOfShards()).append("], "); - if (context.request().source() != null) { - sb.append("source[").append(context.request().source()).append("], "); - } else { - sb.append("source[], "); - } - return sb.toString(); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java b/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java index d65b19e56df..1a155d17964 100644 --- a/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java +++ b/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.SearchSlowLog; import org.elasticsearch.search.internal.SearchContext; import java.util.HashMap; @@ -179,10 +180,6 @@ public final class ShardSearchStats { totalStats.scrollMetric.inc(System.nanoTime() - context.getOriginNanoTime()); } - public void onRefreshSettings(Settings settings) { - slowLogSearchService.onRefreshSettings(settings); - } - final static class StatsHolder { public final MeanMetric queryMetric = new MeanMetric(); public final MeanMetric fetchMetric = new MeanMetric(); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index bdbb12245e1..1fb0b7ec4d3 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -91,7 +91,7 @@ import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import 
org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; -import org.elasticsearch.index.search.stats.SearchSlowLog; +import org.elasticsearch.index.SearchSlowLog; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.search.stats.ShardSearchStats; import org.elasticsearch.index.similarity.SimilarityService; @@ -237,13 +237,13 @@ public class IndexShard extends AbstractIndexShardComponent { /* create engine config */ logger.debug("state: [CREATED]"); - this.checkIndexOnStartup = settings.get("index.shard.check_on_startup", "false"); + this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, provider.getBigArrays()); final QueryCachingPolicy cachingPolicy; // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis - if (settings.getAsBoolean(IndexModule.QUERY_CACHE_EVERYTHING, false)) { + if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE; } else { cachingPolicy = new UsageTrackingQueryCachingPolicy(); @@ -1208,7 +1208,7 @@ public class IndexShard extends AbstractIndexShardComponent { BytesStreamOutput os = new BytesStreamOutput(); PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name()); - if ("checksum".equalsIgnoreCase(checkIndexOnStartup)) { + if ("checksum".equals(checkIndexOnStartup)) { // physical verification only: verify all checksums for the latest commit IOException corrupt = null; MetadataSnapshot metadata = store.getMetadata(); @@ -1240,7 +1240,7 @@ public class IndexShard extends AbstractIndexShardComponent { return; } logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); - if ("fix".equalsIgnoreCase(checkIndexOnStartup)) { + if ("fix".equals(checkIndexOnStartup)) { if (logger.isDebugEnabled()) { logger.debug("fixing index, writing new segments file ..."); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java index f82b832a152..e3600291318 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.search.stats.SearchSlowLog; +import org.elasticsearch.index.SearchSlowLog; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.TranslogStats; diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index fcc18f8b678..d5d6d5234be 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.Constants; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexModule; @@ -50,7 +51,16 @@ import java.util.Set; public class FsDirectoryService extends DirectoryService implements StoreRateLimiting.Listener, StoreRateLimiting.Provider { protected final IndexStore indexStore; - + public static final Setting INDEX_LOCK_FACTOR_SETTING = new Setting<>("index.store.fs.fs_lock", "native", (s) -> { + switch (s) { + case "native": + return NativeFSLockFactory.INSTANCE; + case "simple": + return SimpleFSLockFactory.INSTANCE; + default: + throw new IllegalArgumentException("unrecognized [index.store.fs.fs_lock] \"" + s + "\": must be native or simple"); + } + }, false, Setting.Scope.INDEX); private final CounterMetric rateLimitingTimeInNanos = new CounterMetric(); private final ShardPath path; @@ -71,29 +81,11 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim return indexStore.rateLimiting(); } - public static LockFactory buildLockFactory(IndexSettings indexSettings) { - final Settings settings = indexSettings.getSettings(); - String fsLock = settings.get("index.store.fs.lock", settings.get("index.store.fs.fs_lock", "native")); - LockFactory lockFactory; - if (fsLock.equals("native")) { - lockFactory = NativeFSLockFactory.INSTANCE; - } else if (fsLock.equals("simple")) { - lockFactory = SimpleFSLockFactory.INSTANCE; - } else { - throw new IllegalArgumentException("unrecognized fs_lock \"" + fsLock + "\": must be native or simple"); - } - return lockFactory; - } - - protected final LockFactory buildLockFactory() throws IOException { - return buildLockFactory(indexSettings); - } - @Override public Directory newDirectory() throws IOException { final Path location = path.resolveIndex(); Files.createDirectories(location); - Directory wrapped = newFSDirectory(location, buildLockFactory()); + Directory wrapped = newFSDirectory(location, indexSettings.getValue(INDEX_LOCK_FACTOR_SETTING)); return new RateLimitedFSDirectory(wrapped, this, this) ; } @@ -112,7 +104,7 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { - final String storeType = indexSettings.getSettings().get(IndexModule.STORE_TYPE, IndexModule.Type.DEFAULT.getSettingsKey()); + final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.DEFAULT.getSettingsKey()); if (IndexModule.Type.FS.match(storeType) || IndexModule.Type.DEFAULT.match(storeType)) { final FSDirectory open = FSDirectory.open(location, lockFactory); // use lucene defaults if (open instanceof MMapDirectory && Constants.WINDOWS == false) { diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index ea6f59b0520..29401fdfd51 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.StoreRateLimiting; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.AbstractIndexComponent; @@ -29,32 +30,18 @@ import 
org.elasticsearch.index.shard.ShardPath; * */ public class IndexStore extends AbstractIndexComponent { - - public static final String INDEX_STORE_THROTTLE_TYPE = "index.store.throttle.type"; - public static final String INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC = "index.store.throttle.max_bytes_per_sec"; + public static final Setting INDEX_STORE_THROTTLE_TYPE_SETTING = new Setting<>("index.store.throttle.type", "none", StoreRateLimiting.Type::fromString, true, Setting.Scope.INDEX) ; + public static final Setting INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.INDEX); protected final IndexStoreConfig indexStoreConfig; - private volatile String rateLimitingType; - private volatile ByteSizeValue rateLimitingThrottle; - private volatile boolean nodeRateLimiting; - private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); public IndexStore(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig) { super(indexSettings); this.indexStoreConfig = indexStoreConfig; - - this.rateLimitingType = indexSettings.getSettings().get(INDEX_STORE_THROTTLE_TYPE, "none"); - if (rateLimitingType.equalsIgnoreCase("node")) { - nodeRateLimiting = true; - } else { - nodeRateLimiting = false; - rateLimiting.setType(rateLimitingType); - } - this.rateLimitingThrottle = indexSettings.getSettings().getAsBytesSize(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(0)); - rateLimiting.setMaxRate(rateLimitingThrottle); - - logger.debug("using index.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimitingType, rateLimitingThrottle); + rateLimiting.setType(indexSettings.getValue(INDEX_STORE_THROTTLE_TYPE_SETTING)); + rateLimiting.setMaxRate(indexSettings.getValue(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING)); + logger.debug("using index.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimiting.getType(), rateLimiting.getRateLimiter()); } /** @@ -62,7 +49,7 @@ public class IndexStore extends AbstractIndexComponent { * the node level one (defaults to the node level one). */ public StoreRateLimiting rateLimiting() { - return nodeRateLimiting ? indexStoreConfig.getNodeRateLimiter() : this.rateLimiting; + return rateLimiting.getType() == StoreRateLimiting.Type.NONE ? 
indexStoreConfig.getNodeRateLimiter() : this.rateLimiting; } /** @@ -72,26 +59,11 @@ public class IndexStore extends AbstractIndexComponent { return new FsDirectoryService(indexSettings, this, path); } - public void onRefreshSettings(Settings settings) { - String rateLimitingType = settings.get(INDEX_STORE_THROTTLE_TYPE, IndexStore.this.rateLimitingType); - if (!rateLimitingType.equals(IndexStore.this.rateLimitingType)) { - logger.info("updating index.store.throttle.type from [{}] to [{}]", IndexStore.this.rateLimitingType, rateLimitingType); - if (rateLimitingType.equalsIgnoreCase("node")) { - IndexStore.this.rateLimitingType = rateLimitingType; - IndexStore.this.nodeRateLimiting = true; - } else { - StoreRateLimiting.Type.fromString(rateLimitingType); - IndexStore.this.rateLimitingType = rateLimitingType; - IndexStore.this.nodeRateLimiting = false; - IndexStore.this.rateLimiting.setType(rateLimitingType); - } - } + public void setType(StoreRateLimiting.Type type) { + rateLimiting.setType(type); + } - ByteSizeValue rateLimitingThrottle = settings.getAsBytesSize(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, IndexStore.this.rateLimitingThrottle); - if (!rateLimitingThrottle.equals(IndexStore.this.rateLimitingThrottle)) { - logger.info("updating index.store.throttle.max_bytes_per_sec from [{}] to [{}], note, type is [{}]", IndexStore.this.rateLimitingThrottle, rateLimitingThrottle, IndexStore.this.rateLimitingType); - IndexStore.this.rateLimitingThrottle = rateLimitingThrottle; - IndexStore.this.rateLimiting.setMaxRate(rateLimitingThrottle); - } + public void setMaxRate(ByteSizeValue rate) { + rateLimiting.setMaxRate(rate); } } diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index ed561876735..ab7075afa5b 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; * indices.store.throttle.type or indices.store.throttle.max_bytes_per_sec are reflected immediately * on all referencing {@link IndexStore} instances */ -public class IndexStoreConfig{ +public class IndexStoreConfig { /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. 
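The IndexStore and FsDirectoryService hunks above replace string-keyed settings that were read ad hoc and re-parsed in onRefreshSettings with typed Setting constants whose parse function doubles as validation, so a bad value is rejected before any component state changes. A toy sketch of that idea under those assumptions; DynamicSetting and ThrottleType are illustrative, not the real org.elasticsearch.common.settings.Setting API:

```java
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;

// Toy model of a typed, eagerly-validated setting: a key, a default raw value,
// and a parse function that is also the validator.
final class DynamicSetting<T> {
    private final String key;
    private final T defaultValue;
    private final Function<String, T> parser;

    DynamicSetting(String key, String defaultRaw, Function<String, T> parser) {
        this.key = key;
        this.parser = parser;
        this.defaultValue = parser.apply(defaultRaw); // fail fast on a bad default
    }

    T get(Map<String, String> settings) {
        String raw = settings.get(key);
        return raw == null ? defaultValue : parser.apply(raw); // parsing == validating
    }
}

public class SettingsDemo {
    enum ThrottleType {
        NONE, MERGE, ALL;
        static ThrottleType fromString(String s) {
            return valueOf(s.toUpperCase(Locale.ROOT));
        }
    }

    public static void main(String[] args) {
        DynamicSetting<ThrottleType> type =
                new DynamicSetting<>("index.store.throttle.type", "none", ThrottleType::fromString);
        Map<String, String> settings = new HashMap<>();
        settings.put("index.store.throttle.type", "merge");
        System.out.println(type.get(settings)); // MERGE
        settings.put("index.store.throttle.type", "bogus");
        try {
            type.get(settings); // invalid input never reaches the rate limiter
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```

Centralizing the parse step is what lets the diff delete the hand-rolled onRefreshSettings methods: updates arrive already typed and validated, and setters like setType and setMaxRate stay trivial.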
diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index 3b3074e2385..f6e48e718b9 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -61,6 +61,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; @@ -81,6 +82,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.NoSuchFileException; import java.nio.file.Path; +import java.sql.Time; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -126,7 +128,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final int VERSION_START = 0; static final int VERSION = VERSION_WRITE_THROWABLE; static final String CORRUPTED = "corrupted_"; - public static final String INDEX_STORE_STATS_REFRESH_INTERVAL = "index.store.stats_refresh_interval"; + public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.INDEX); private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -154,7 +156,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref this.directory = new StoreDirectory(directoryService.newDirectory(), Loggers.getLogger("index.store.deletes", settings, shardId)); this.shardLock = shardLock; this.onClose = onClose; - final TimeValue refreshInterval = settings.getAsTime(INDEX_STORE_STATS_REFRESH_INTERVAL, TimeValue.timeValueSeconds(10)); + final TimeValue refreshInterval = indexSettings.getValue(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING); this.statsCache = new StoreStatsCache(refreshInterval, directory, directoryService); logger.debug("store stats are refreshed with refresh_interval [{}]", refreshInterval); diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreModule.java b/core/src/main/java/org/elasticsearch/index/store/StoreModule.java deleted file mode 100644 index fccd2de2e43..00000000000 --- a/core/src/main/java/org/elasticsearch/index/store/StoreModule.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.store; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.env.ShardLock; -import org.elasticsearch.index.shard.ShardPath; - -/** - * - */ -public class StoreModule extends AbstractModule { - private final ShardLock lock; - private final Store.OnClose closeCallback; - private final ShardPath path; - private final Class<? extends DirectoryService> shardDirectory; - - - public StoreModule(Class<? extends DirectoryService> shardDirectory, ShardLock lock, Store.OnClose closeCallback, ShardPath path) { - this.shardDirectory = shardDirectory; - this.lock = lock; - this.closeCallback = closeCallback; - this.path = path; - } - - @Override - protected void configure() { - bind(DirectoryService.class).to(shardDirectory).asEagerSingleton(); - bind(Store.class).asEagerSingleton(); - bind(ShardLock.class).toInstance(lock); - bind(Store.OnClose.class).toInstance(closeCallback); - bind(ShardPath.class).toInstance(path); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java new file mode 100644 index 00000000000..c98ea69f87f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java @@ -0,0 +1,136 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.translog; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.ByteBufferStreamInput; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Path; + +/** + * A base class for all classes that allow reading ops from translog files + */ +public abstract class BaseTranslogReader implements Comparable<BaseTranslogReader> { + + protected final long generation; + protected final FileChannel channel; + protected final Path path; + protected final long firstOperationOffset; + + public BaseTranslogReader(long generation, FileChannel channel, Path path, long firstOperationOffset) { + assert Translog.parseIdFromFileName(path) == generation : "generation mismatch.
Path: " + Translog.parseIdFromFileName(path) + " but generation: " + generation; + + this.generation = generation; + this.path = path; + this.channel = channel; + this.firstOperationOffset = firstOperationOffset; + } + + public long getGeneration() { + return this.generation; + } + + public abstract long sizeInBytes(); + + abstract public int totalOperations(); + + public final long getFirstOperationOffset() { + return firstOperationOffset; + } + + public Translog.Operation read(Translog.Location location) throws IOException { + assert location.generation == generation : "read location's translog generation [" + location.generation + "] is not [" + generation + "]"; + ByteBuffer buffer = ByteBuffer.allocate(location.size); + try (BufferedChecksumStreamInput checksumStreamInput = checksummedStream(buffer, location.translogLocation, location.size, null)) { + return read(checksumStreamInput); + } + } + + /** read the size of the op (i.e., number of bytes, including the op size) written at the given position */ + protected final int readSize(ByteBuffer reusableBuffer, long position) { + // read op size from disk + assert reusableBuffer.capacity() >= 4 : "reusable buffer must have capacity >=4 when reading opSize. got [" + reusableBuffer.capacity() + "]"; + try { + reusableBuffer.clear(); + reusableBuffer.limit(4); + readBytes(reusableBuffer, position); + reusableBuffer.flip(); + // Add an extra 4 to account for the operation size integer itself + final int size = reusableBuffer.getInt() + 4; + final long maxSize = sizeInBytes() - position; + if (size < 0 || size > maxSize) { + throw new TranslogCorruptedException("operation size is corrupted must be [0.." + maxSize + "] but was: " + size); + } + + return size; + } catch (IOException e) { + throw new ElasticsearchException("unexpected exception reading from translog snapshot of " + this.path, e); + } + } + + public Translog.Snapshot newSnapshot() { + return new TranslogSnapshot(generation, channel, path, firstOperationOffset, sizeInBytes(), totalOperations()); + } + + /** + * reads an operation at the given position and returns it. The buffer length is equal to the number + * of bytes reads. + */ + protected final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException { + final ByteBuffer buffer; + if (reusableBuffer.capacity() >= opSize) { + buffer = reusableBuffer; + } else { + buffer = ByteBuffer.allocate(opSize); + } + buffer.clear(); + buffer.limit(opSize); + readBytes(buffer, position); + buffer.flip(); + return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse); + } + + protected Translog.Operation read(BufferedChecksumStreamInput inStream) throws IOException { + return Translog.readOperation(inStream); + } + + /** + * reads bytes at position into the given buffer, filling it. 
+ */ + abstract protected void readBytes(ByteBuffer buffer, long position) throws IOException; + + @Override + public String toString() { + return "translog [" + generation + "][" + path + "]"; + } + + @Override + public int compareTo(BaseTranslogReader o) { + return Long.compare(getGeneration(), o.getGeneration()); + } + + + public Path path() { + return path; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java b/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java deleted file mode 100644 index b3f60a4c89f..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.translog; - -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.util.Callback; -import org.elasticsearch.common.util.concurrent.AbstractRefCounted; - -import java.io.IOException; -import java.nio.channels.FileChannel; -import java.nio.file.Path; - -final class ChannelReference extends AbstractRefCounted { - private final Path file; - private final FileChannel channel; - protected final long generation; - private final Callback onClose; - - ChannelReference(Path file, long generation, FileChannel channel, Callback onClose) throws IOException { - super(file.toString()); - this.generation = generation; - this.file = file; - this.channel = channel; - this.onClose = onClose; - } - - public long getGeneration() { - return generation; - } - - public Path getPath() { - return this.file; - } - - public FileChannel getChannel() { - return this.channel; - } - - @Override - public String toString() { - return "channel: file [" + file + "], ref count [" + refCount() + "]"; - } - - @Override - protected void closeInternal() { - try { - IOUtils.closeWhileHandlingException(channel); - } finally { - if (onClose != null) { - onClose.handle(this); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java deleted file mode 100644 index 463c5998f1d..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.translog; - -import java.io.IOException; - -/** - * Version 0 of the translog format, there is no header in this file - */ -@Deprecated -public final class LegacyTranslogReader extends LegacyTranslogReaderBase { - - /** - * Create a snapshot of translog file channel. The length parameter should be consistent with totalOperations and point - * at the end of the last operation in this snapshot. - */ - LegacyTranslogReader(long generation, ChannelReference channelReference, long fileLength) { - super(generation, channelReference, 0, fileLength); - } - - @Override - protected Translog.Operation read(BufferedChecksumStreamInput in) throws IOException { - // read the opsize before an operation. - // Note that this was written & read out side of the stream when this class was used, but it makes things more consistent - // to read this here - in.readInt(); - Translog.Operation.Type type = Translog.Operation.Type.fromId(in.readByte()); - Translog.Operation operation = Translog.newOperationFromType(type); - operation.readFrom(in); - return operation; - } - - - - @Override - protected ImmutableTranslogReader newReader(long generation, ChannelReference channelReference, long firstOperationOffset, long length, int totalOperations) { - assert totalOperations == -1 : "expected unknown but was: " + totalOperations; - assert firstOperationOffset == 0; - return new LegacyTranslogReader(generation, channelReference, length); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReaderBase.java b/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReaderBase.java deleted file mode 100644 index d9e9e17f792..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReaderBase.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.translog; - -import java.io.IOException; -import java.nio.ByteBuffer; - -/** - * Version 1 of the translog format, there is checkpoint and therefore no notion of op count - */ -@Deprecated -class LegacyTranslogReaderBase extends ImmutableTranslogReader { - - /** - * Create a snapshot of translog file channel. The length parameter should be consistent with totalOperations and point - * at the end of the last operation in this snapshot. 
- * - */ - LegacyTranslogReaderBase(long generation, ChannelReference channelReference, long firstOperationOffset, long fileLength) { - super(generation, channelReference, firstOperationOffset, fileLength, TranslogReader.UNKNOWN_OP_COUNT); - } - - - @Override - protected Translog.Snapshot newReaderSnapshot(final int totalOperations, ByteBuffer reusableBuffer) { - assert totalOperations == -1 : "legacy we had no idea how many ops: " + totalOperations; - return new ReaderSnapshot(totalOperations, reusableBuffer) { - @Override - public Translog.Operation next() throws IOException { - if (position >= sizeInBytes()) { // this is the legacy case.... - return null; - } - try { - return readOperation(); - } catch (TruncatedTranslogException ex) { - return null; // legacy case - } - } - }; - } - - @Override - protected ImmutableTranslogReader newReader(long generation, ChannelReference channelReference, long firstOperationOffset, long length, int totalOperations) { - assert totalOperations == -1 : "expected unknown but was: " + totalOperations; - return new LegacyTranslogReaderBase(generation, channelReference, firstOperationOffset, length); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java b/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java index b76214dc2e7..7b1a05e1ac1 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java +++ b/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java @@ -19,12 +19,8 @@ package org.elasticsearch.index.translog; -import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.lease.Releasables; - import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Arrays; /** * A snapshot composed out of multiple snapshots @@ -32,8 +28,7 @@ import java.util.concurrent.atomic.AtomicBoolean; final class MultiSnapshot implements Translog.Snapshot { private final Translog.Snapshot[] translogs; - private AtomicBoolean closed = new AtomicBoolean(false); - private final int estimatedTotalOperations; + private final int totalOperations; private int index; /** @@ -41,30 +36,18 @@ final class MultiSnapshot implements Translog.Snapshot { */ MultiSnapshot(Translog.Snapshot[] translogs) { this.translogs = translogs; - int ops = 0; - for (Translog.Snapshot translog : translogs) { - - final int tops = translog.estimatedTotalOperations(); - if (tops == TranslogReader.UNKNOWN_OP_COUNT) { - ops = TranslogReader.UNKNOWN_OP_COUNT; - break; - } - assert tops >= 0 : "tops must be positive but was: " + tops; - ops += tops; - } - estimatedTotalOperations = ops; + totalOperations = Arrays.stream(translogs).mapToInt(Translog.Snapshot::totalOperations).sum(); index = 0; } @Override - public int estimatedTotalOperations() { - return estimatedTotalOperations; + public int totalOperations() { + return totalOperations; } @Override public Translog.Operation next() throws IOException { - ensureOpen(); for (; index < translogs.length; index++) { final Translog.Snapshot current = translogs[index]; Translog.Operation op = current.next(); @@ -74,17 +57,4 @@ final class MultiSnapshot implements Translog.Snapshot { } return null; } - - protected void ensureOpen() { - if (closed.get()) { - throw new AlreadyClosedException("snapshot already closed"); - } - } - - @Override - public void close() throws ElasticsearchException { - if (closed.compareAndSet(false, true)) { - Releasables.close(translogs); - 
} - } } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 0001fda0752..b2e81de044b 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -34,12 +34,9 @@ import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ReleasableLock; @@ -53,7 +50,6 @@ import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; -import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; @@ -69,6 +65,8 @@ import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * A Translog is a per index shard component that records all non-committed index operations in a durable manner. @@ -112,29 +110,25 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC static final Pattern PARSE_STRICT_ID_PATTERN = Pattern.compile("^" + TRANSLOG_FILE_PREFIX + "(\\d+)(\\.tlog)$"); - private final List recoveredTranslogs; + // the list of translog readers is guaranteed to be in order of translog generation + private final List readers = new ArrayList<>(); private volatile ScheduledFuture syncScheduler; // this is a concurrent set and is not protected by any of the locks. The main reason - // is that is being accessed by two separate classes (additions & reading are done by FsTranslog, remove by FsView when closed) + // is that is being accessed by two separate classes (additions & reading are done by Translog, remove by View when closed) private final Set outstandingViews = ConcurrentCollections.newConcurrentSet(); private BigArrays bigArrays; protected final ReleasableLock readLock; protected final ReleasableLock writeLock; private final Path location; private TranslogWriter current; - private volatile ImmutableTranslogReader currentCommittingTranslog; - private volatile long lastCommittedTranslogFileGeneration = -1; // -1 is safe as it will not cause an translog deletion. + + private final static long NOT_SET_GENERATION = -1; // -1 is safe as it will not cause a translog deletion. 
+ + private volatile long currentCommittingGeneration = NOT_SET_GENERATION; + private volatile long lastCommittedTranslogFileGeneration = NOT_SET_GENERATION; private final AtomicBoolean closed = new AtomicBoolean(); private final TranslogConfig config; private final String translogUUID; - private Callback onViewClose = new Callback() { - @Override - public void handle(View view) { - logger.trace("closing view starting at translog [{}]", view.minTranslogGeneration()); - boolean removed = outstandingViews.remove(view); - assert removed : "View was never set but was supposed to be removed"; - } - }; /** @@ -176,11 +170,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // if not we don't even try to clean it up and wait until we fail creating it assert Files.exists(nextTranslogFile) == false || Files.size(nextTranslogFile) <= TranslogWriter.getHeaderLength(translogUUID) : "unexpected translog file: [" + nextTranslogFile + "]"; if (Files.exists(currentCheckpointFile) // current checkpoint is already copied - && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning + && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning logger.warn("deleted previously created, but not yet committed, next generation [{}]. This can happen due to a tragic exception when creating a new generation", nextTranslogFile.getFileName()); } - this.recoveredTranslogs = recoverFromFiles(translogGeneration, checkpoint); - if (recoveredTranslogs.isEmpty()) { + this.readers.addAll(recoverFromFiles(translogGeneration, checkpoint)); + if (readers.isEmpty()) { throw new IllegalStateException("at least one reader must be recovered"); } boolean success = false; @@ -193,11 +187,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // for instance if we have a lot of tlog and we can't create the writer we keep on holding // on to all the uncommitted tlog files if we don't close if (success == false) { - IOUtils.closeWhileHandlingException(recoveredTranslogs); + IOUtils.closeWhileHandlingException(readers); } } } else { - this.recoveredTranslogs = Collections.emptyList(); IOUtils.rm(location); logger.debug("wipe translog location - creating new translog"); Files.createDirectories(location); @@ -205,21 +198,22 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC Checkpoint checkpoint = new Checkpoint(0, 0, generation); Checkpoint.write(location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); current = createWriter(generation); - this.lastCommittedTranslogFileGeneration = -1; // playing safe + this.lastCommittedTranslogFileGeneration = NOT_SET_GENERATION; } // now that we know which files are there, create a new current one. } catch (Throwable t) { // close the opened translog files if we fail to create a new translog... 
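The recovery path above is careful to close whatever it has already opened if a later step fails, before rethrowing. A minimal sketch of that idiom, assuming plain Closeable in place of TranslogReader and a hypothetical openReader helper standing in for Translog#openReader:

    import java.io.Closeable;
    import java.io.IOException;
    import java.nio.file.Path;
    import java.util.ArrayList;
    import java.util.List;

    class RecoverySketch {
        static List<Closeable> recoverFromFiles(List<Path> paths) throws IOException {
            List<Closeable> readers = new ArrayList<>();
            boolean success = false;
            try {
                for (Path path : paths) {
                    readers.add(openReader(path)); // may throw halfway through
                }
                success = true;
                return readers;
            } finally {
                if (success == false) {
                    // close what was already opened, suppressing secondary failures,
                    // mirroring IOUtils.closeWhileHandlingException(readers)
                    for (Closeable reader : readers) {
                        try {
                            reader.close();
                        } catch (IOException suppressed) {
                            // ignored: the original exception is already propagating
                        }
                    }
                }
            }
        }

        private static Closeable openReader(Path path) throws IOException {
            // hypothetical stand-in for Translog#openReader(path, checkpoint)
            return () -> {};
        }
    }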
- IOUtils.closeWhileHandlingException(currentCommittingTranslog, current); + IOUtils.closeWhileHandlingException(current); + IOUtils.closeWhileHandlingException(readers); throw t; } } /** recover all translog files found on disk */ - private final ArrayList recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException { + private final ArrayList recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException { boolean success = false; - ArrayList foundTranslogs = new ArrayList<>(); + ArrayList foundTranslogs = new ArrayList<>(); final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work boolean tempFileRenamed = false; try (ReleasableLock lock = writeLock.acquire()) { @@ -230,7 +224,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (Files.exists(committedTranslogFile) == false) { throw new IllegalStateException("translog file doesn't exist with generation: " + i + " lastCommitted: " + lastCommittedTranslogFileGeneration + " checkpoint: " + checkpoint.generation + " - translog ids must be consecutive"); } - final ImmutableTranslogReader reader = openReader(committedTranslogFile, Checkpoint.read(location.resolve(getCommitCheckpointFileName(i)))); + final TranslogReader reader = openReader(committedTranslogFile, Checkpoint.read(location.resolve(getCommitCheckpointFileName(i)))); foundTranslogs.add(reader); logger.debug("recovered local translog from checkpoint {}", checkpoint); } @@ -267,17 +261,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return foundTranslogs; } - ImmutableTranslogReader openReader(Path path, Checkpoint checkpoint) throws IOException { - final long generation; - try { - generation = parseIdFromFileName(path); - } catch (IllegalArgumentException ex) { - throw new TranslogException(shardId, "failed to parse generation from file name matching pattern " + path, ex); - } + TranslogReader openReader(Path path, Checkpoint checkpoint) throws IOException { FileChannel channel = FileChannel.open(path, StandardOpenOption.READ); try { - final ChannelReference raf = new ChannelReference(path, generation, channel, new OnCloseRunnable()); - ImmutableTranslogReader reader = ImmutableTranslogReader.open(raf, checkpoint, translogUUID); + assert Translog.parseIdFromFileName(path) == checkpoint.generation : "expected generation: " + Translog.parseIdFromFileName(path) + " but got: " + checkpoint.generation; + TranslogReader reader = TranslogReader.open(channel, path, checkpoint, translogUUID); channel = null; return reader; } finally { @@ -315,12 +303,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try { current.sync(); } finally { - try { - IOUtils.close(current, currentCommittingTranslog); - } finally { - IOUtils.close(recoveredTranslogs); - recoveredTranslogs.clear(); - } + closeFilesIfNoPendingViews(); } } finally { FutureUtils.cancel(syncScheduler); @@ -349,41 +332,49 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC /** * Returns the number of operations in the transaction files that aren't committed to lucene.. 
- * Note: may return -1 if unknown */ public int totalOperations() { - int ops = 0; - try (ReleasableLock lock = readLock.acquire()) { - ops += current.totalOperations(); - if (currentCommittingTranslog != null) { - int tops = currentCommittingTranslog.totalOperations(); - assert tops != TranslogReader.UNKNOWN_OP_COUNT; - assert tops >= 0; - ops += tops; - } - } - return ops; + return totalOperations(lastCommittedTranslogFileGeneration); } /** * Returns the size in bytes of the translog files that aren't committed to lucene. */ public long sizeInBytes() { - long size = 0; - try (ReleasableLock lock = readLock.acquire()) { - size += current.sizeInBytes(); - if (currentCommittingTranslog != null) { - size += currentCommittingTranslog.sizeInBytes(); - } + return sizeInBytes(lastCommittedTranslogFileGeneration); + } + + /** + * Returns the number of operations in the transaction files that aren't committed to lucene.. + */ + private int totalOperations(long minGeneration) { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + return Stream.concat(readers.stream(), Stream.of(current)) + .filter(r -> r.getGeneration() >= minGeneration) + .mapToInt(BaseTranslogReader::totalOperations) + .sum(); + } + } + + /** + * Returns the size in bytes of the translog files that aren't committed to lucene. + */ + private long sizeInBytes(long minGeneration) { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + return Stream.concat(readers.stream(), Stream.of(current)) + .filter(r -> r.getGeneration() >= minGeneration) + .mapToLong(BaseTranslogReader::sizeInBytes) + .sum(); } - return size; } TranslogWriter createWriter(long fileGeneration) throws IOException { TranslogWriter newFile; try { - newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), getChannelFactory(), config.getBufferSize()); + newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), getChannelFactory(), config.getBufferSize()); } catch (IOException e) { throw new TranslogException(shardId, "failed to create new translog file", e); } @@ -398,12 +389,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC */ public Translog.Operation read(Location location) { try (ReleasableLock lock = readLock.acquire()) { - final TranslogReader reader; + final BaseTranslogReader reader; final long currentGeneration = current.getGeneration(); if (currentGeneration == location.generation) { reader = current; - } else if (currentCommittingTranslog != null && currentCommittingTranslog.getGeneration() == location.generation) { - reader = currentCommittingTranslog; + } else if (readers.isEmpty() == false && readers.get(readers.size() - 1).getGeneration() == location.generation) { + reader = readers.get(readers.size() - 1); } else if (currentGeneration < location.generation) { throw new IllegalStateException("location generation [" + location.generation + "] is greater than the current generation [" + currentGeneration + "]"); } else { @@ -467,33 +458,16 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * Snapshots are fixed in time and will not be updated with future operations. 
*/ public Snapshot newSnapshot() { - ensureOpen(); - try (ReleasableLock lock = readLock.acquire()) { - ArrayList toOpen = new ArrayList<>(); - toOpen.addAll(recoveredTranslogs); - if (currentCommittingTranslog != null) { - toOpen.add(currentCommittingTranslog); - } - toOpen.add(current); - return createSnapshot(toOpen.toArray(new TranslogReader[toOpen.size()])); - } + return createSnapshot(Long.MIN_VALUE); } - private static Snapshot createSnapshot(TranslogReader... translogs) { - Snapshot[] snapshots = new Snapshot[translogs.length]; - boolean success = false; - try { - for (int i = 0; i < translogs.length; i++) { - snapshots[i] = translogs[i].newSnapshot(); - } - - Snapshot snapshot = new MultiSnapshot(snapshots); - success = true; - return snapshot; - } finally { - if (success == false) { - Releasables.close(snapshots); - } + private Snapshot createSnapshot(long minGeneration) { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + Snapshot[] snapshots = Stream.concat(readers.stream(), Stream.of(current)) + .filter(reader -> reader.getGeneration() >= minGeneration) + .map(BaseTranslogReader::newSnapshot).toArray(Snapshot[]::new); + return new MultiSnapshot(snapshots); } } @@ -502,25 +476,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * while receiving future ones as well */ public Translog.View newView() { - // we need to acquire the read lock to make sure no new translog is created - // and will be missed by the view we're making try (ReleasableLock lock = readLock.acquire()) { - ArrayList translogs = new ArrayList<>(); - try { - if (currentCommittingTranslog != null) { - translogs.add(currentCommittingTranslog.clone()); - } - translogs.add(current.newReaderFromWriter()); - View view = new View(translogs, onViewClose); - // this is safe as we know that no new translog is being made at the moment - // (we hold a read lock) and the view will be notified of any future one - outstandingViews.add(view); - translogs.clear(); - return view; - } finally { - // close if anything happend and we didn't reach the clear - IOUtils.closeWhileHandlingException(translogs); - } + ensureOpen(); + View view = new View(lastCommittedTranslogFileGeneration); + outstandingViews.add(view); + return view; } } @@ -561,7 +521,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC */ public boolean ensureSynced(Location location) throws IOException { try (ReleasableLock lock = readLock.acquire()) { - if (location.generation == current.generation) { // if we have a new one it's already synced + if (location.generation == current.getGeneration()) { // if we have a new one it's already synced ensureOpen(); return current.syncUpTo(location.translogLocation + location.size); } @@ -604,151 +564,67 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return config; } - - private final class OnCloseRunnable implements Callback { - @Override - public void handle(ChannelReference channelReference) { - if (isReferencedGeneration(channelReference.getGeneration()) == false) { - Path translogPath = channelReference.getPath(); - assert channelReference.getPath().getParent().equals(location) : "translog files must be in the location folder: " + location + " but was: " + translogPath; - // if the given translogPath is not the current we can safely delete the file since all references are released - logger.trace("delete translog file - not referenced and not current anymore {}", translogPath); - 
IOUtils.deleteFilesIgnoringExceptions(translogPath);
- IOUtils.deleteFilesIgnoringExceptions(translogPath.resolveSibling(getCommitCheckpointFileName(channelReference.getGeneration())));
-
- }
- try (DirectoryStream stream = Files.newDirectoryStream(location)) {
- for (Path path : stream) {
- Matcher matcher = PARSE_STRICT_ID_PATTERN.matcher(path.getFileName().toString());
- if (matcher.matches()) {
- long generation = Long.parseLong(matcher.group(1));
- if (isReferencedGeneration(generation) == false) {
- logger.trace("delete translog file - not referenced and not current anymore {}", path);
- IOUtils.deleteFilesIgnoringExceptions(path);
- IOUtils.deleteFilesIgnoringExceptions(path.resolveSibling(getCommitCheckpointFileName(channelReference.getGeneration())));
- }
- }
- }
- } catch (IOException e) {
- logger.warn("failed to delete unreferenced translog files", e);
- }
- }
- }
-
/**
* a view into the translog, capturing all translog files at the moment of creation
* and updated with any future translog.
*/
- public static final class View implements Closeable {
- public static final Translog.View EMPTY_VIEW = new View(Collections.emptyList(), null);
+ public class View implements Closeable {
- boolean closed;
- // last in this list is always FsTranslog.current
- final List<TranslogReader> orderedTranslogs;
- private final Callback<View> onClose;
+ AtomicBoolean closed = new AtomicBoolean();
+ final long minGeneration;
- View(List<TranslogReader> orderedTranslogs, Callback<View> onClose) {
- // clone so we can safely mutate..
- this.orderedTranslogs = new ArrayList<>(orderedTranslogs);
- this.onClose = onClose;
- }
-
- /**
- * Called by the parent class when ever the current translog changes
- *
- * @param oldCurrent a new read only reader for the old current (should replace the previous reference)
- * @param newCurrent a reader into the new current.
- */
- synchronized void onNewTranslog(TranslogReader oldCurrent, TranslogReader newCurrent) throws IOException {
- // even though the close method removes this view from outstandingViews, there is no synchronisation in place
- // between that operation and an ongoing addition of a new translog, already having an iterator.
- // As such, this method can be called despite of the fact that we are closed. We need to check and ignore.
- if (closed) {
- // we have to close the new references created for as as we will not hold them
- IOUtils.close(oldCurrent, newCurrent);
- return;
- }
- orderedTranslogs.remove(orderedTranslogs.size() - 1).close();
- orderedTranslogs.add(oldCurrent);
- orderedTranslogs.add(newCurrent);
+ View(long minGeneration) {
+ this.minGeneration = minGeneration;
}

/** the smallest translog generation in this view */
- public synchronized long minTranslogGeneration() {
- ensureOpen();
- return orderedTranslogs.get(0).getGeneration();
+ public long minTranslogGeneration() {
+ return minGeneration;
}

/**
* The total number of operations in the view.
*/
- public synchronized int totalOperations() {
- int ops = 0;
- for (TranslogReader translog : orderedTranslogs) {
- int tops = translog.totalOperations();
- if (tops == TranslogReader.UNKNOWN_OP_COUNT) {
- return -1;
- }
- assert tops >= 0;
- ops += tops;
- }
- return ops;
+ public int totalOperations() {
+ return Translog.this.totalOperations(minGeneration);
}

/**
* Returns the size in bytes of the files behind the view.
*/
- public synchronized long sizeInBytes() {
- long size = 0;
- for (TranslogReader translog : orderedTranslogs) {
- size += translog.sizeInBytes();
- }
- return size;
+ public long sizeInBytes() {
+ return Translog.this.sizeInBytes(minGeneration);
}

/** create a snapshot from this view */
- public synchronized Snapshot snapshot() {
+ public Snapshot snapshot() {
ensureOpen();
- return createSnapshot(orderedTranslogs.toArray(new TranslogReader[orderedTranslogs.size()]));
+ return Translog.this.createSnapshot(minGeneration);
}

void ensureOpen() {
- if (closed) {
- throw new ElasticsearchException("View is already closed");
+ if (closed.get()) {
+ throw new AlreadyClosedException("View is already closed");
}
}

@Override
- public void close() {
- final List<TranslogReader> toClose = new ArrayList<>();
- try {
- synchronized (this) {
- if (closed == false) {
- try {
- if (onClose != null) {
- onClose.handle(this);
- }
- } finally {
- closed = true;
- toClose.addAll(orderedTranslogs);
- orderedTranslogs.clear();
- }
- }
- }
- } finally {
- try {
- // Close out of lock to prevent deadlocks between channel close which checks for
- // references in InternalChannelReference.closeInternal (waiting on a read lock)
- // and other FsTranslog#newTranslog calling FsView.onNewTranslog (while having a write lock)
- IOUtils.close(toClose);
- } catch (Exception e) {
- throw new ElasticsearchException("failed to close view", e);
- }
+ public void close() throws IOException {
+ if (closed.getAndSet(true) == false) {
+ logger.trace("closing view starting at translog [{}]", minTranslogGeneration());
+ boolean removed = outstandingViews.remove(this);
+ assert removed : "View was never set but was supposed to be removed";
+ trimUnreferencedReaders();
+ closeFilesIfNoPendingViews();
}
}
}
+
public static class Location implements Accountable, Comparable<Location> {

public final long generation;
@@ -817,12 +693,12 @@
/**
* A snapshot of the transaction log, allows to iterate over all the transaction log operations.
*/
- public interface Snapshot extends Releasable {
+ public interface Snapshot {

/**
* The total number of operations in the translog.
*/
- int estimatedTotalOperations();
+ int totalOperations();

/**
* Returns the next operation in the snapshot or null if we reached the end.
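Taken together, the View rewrite replaces per-file reference counting with generation arithmetic: a view only records the minimum generation it needs, and deletion is driven off the minimum over all open views. A condensed sketch of that scheme follows; TranslogLike and its members are illustrative names, not the real API:

    import java.io.Closeable;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicBoolean;

    class TranslogLike {
        private final Set<View> outstandingViews = ConcurrentHashMap.newKeySet();
        private volatile long lastCommittedGeneration = -1;

        View newView() {
            // the view pins everything from the last committed generation onwards
            View view = new View(lastCommittedGeneration);
            outstandingViews.add(view);
            return view;
        }

        void trimUnreferencedReaders() {
            // a file is deletable once its generation is below every open view's
            // minimum and below the last committed generation
            long minReferencedGen = outstandingViews.stream()
                .mapToLong(v -> v.minGeneration)
                .min()
                .orElse(Long.MAX_VALUE);
            minReferencedGen = Math.min(lastCommittedGeneration, minReferencedGen);
            // ... close and delete readers whose generation is < minReferencedGen ...
        }

        class View implements Closeable {
            final long minGeneration;
            private final AtomicBoolean closed = new AtomicBoolean();

            View(long minGeneration) {
                this.minGeneration = minGeneration;
            }

            @Override
            public void close() {
                if (closed.getAndSet(true) == false) {
                    outstandingViews.remove(this);
                    trimUnreferencedReaders(); // generations this view pinned may now be deletable
                }
            }
        }
    }

Since a view holds no file handles, nothing needs to be cloned or handed over when the current writer rolls; the view simply keeps seeing every generation at or above its minimum.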
@@ -1320,13 +1196,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public void prepareCommit() throws IOException { try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); - if (currentCommittingTranslog != null) { - throw new IllegalStateException("already committing a translog with generation: " + currentCommittingTranslog.getGeneration()); + if (currentCommittingGeneration != NOT_SET_GENERATION) { + throw new IllegalStateException("already committing a translog with generation: " + currentCommittingGeneration); } - final TranslogWriter oldCurrent = current; - oldCurrent.ensureOpen(); - oldCurrent.sync(); - currentCommittingTranslog = current.immutableReader(); + currentCommittingGeneration = current.getGeneration(); + TranslogReader currentCommittingTranslog = current.closeIntoReader(); + readers.add(currentCommittingTranslog); Path checkpoint = location.resolve(CHECKPOINT_FILE_NAME); assert Checkpoint.read(checkpoint).generation == currentCommittingTranslog.getGeneration(); Path commitCheckpoint = location.resolve(getCommitCheckpointFileName(currentCommittingTranslog.getGeneration())); @@ -1335,14 +1210,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC IOUtils.fsync(commitCheckpoint.getParent(), true); // create a new translog file - this will sync it and update the checkpoint data; current = createWriter(current.getGeneration() + 1); - // notify all outstanding views of the new translog (no views are created now as - // we hold a write lock). - for (View view : outstandingViews) { - view.onNewTranslog(currentCommittingTranslog.clone(), current.newReaderFromWriter()); - } - IOUtils.close(oldCurrent); logger.trace("current translog set to [{}]", current.getGeneration()); - assert oldCurrent.syncNeeded() == false : "old translog oldCurrent must not need a sync"; } catch (Throwable t) { IOUtils.closeWhileHandlingException(this); // tragic event @@ -1352,24 +1220,53 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public void commit() throws IOException { - ImmutableTranslogReader toClose = null; try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); - if (currentCommittingTranslog == null) { + if (currentCommittingGeneration == NOT_SET_GENERATION) { prepareCommit(); } + assert currentCommittingGeneration != NOT_SET_GENERATION; + assert readers.stream().filter(r -> r.getGeneration() == currentCommittingGeneration).findFirst().isPresent() + : "reader list doesn't contain committing generation [" + currentCommittingGeneration + "]"; lastCommittedTranslogFileGeneration = current.getGeneration(); // this is important - otherwise old files will not be cleaned up - if (recoveredTranslogs.isEmpty() == false) { - IOUtils.close(recoveredTranslogs); - recoveredTranslogs.clear(); - } - toClose = this.currentCommittingTranslog; - this.currentCommittingTranslog = null; - } finally { - IOUtils.close(toClose); + currentCommittingGeneration = NOT_SET_GENERATION; + trimUnreferencedReaders(); } } + void trimUnreferencedReaders() { + try (ReleasableLock ignored = writeLock.acquire()) { + if (closed.get()) { + // we're shutdown potentially on some tragic event - don't delete anything + return; + } + long minReferencedGen = outstandingViews.stream().mapToLong(View::minTranslogGeneration).min().orElse(Long.MAX_VALUE); + minReferencedGen = Math.min(lastCommittedTranslogFileGeneration, minReferencedGen); + final long finalMinReferencedGen = minReferencedGen; + List unreferenced = 
readers.stream().filter(r -> r.getGeneration() < finalMinReferencedGen).collect(Collectors.toList()); + for (final TranslogReader unreferencedReader : unreferenced) { + Path translogPath = unreferencedReader.path(); + logger.trace("delete translog file - not referenced and not current anymore {}", translogPath); + IOUtils.closeWhileHandlingException(unreferencedReader); + IOUtils.deleteFilesIgnoringExceptions(translogPath, + translogPath.resolveSibling(getCommitCheckpointFileName(unreferencedReader.getGeneration()))); + } + readers.removeAll(unreferenced); + } + } + + void closeFilesIfNoPendingViews() throws IOException { + try (ReleasableLock ignored = writeLock.acquire()) { + if (closed.get() && outstandingViews.isEmpty()) { + logger.trace("closing files. translog is closed and there are no pending views"); + ArrayList toClose = new ArrayList<>(readers); + toClose.add(current); + IOUtils.close(toClose); + } + } + } + + @Override public void rollback() throws IOException { ensureOpen(); @@ -1435,9 +1332,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return TranslogWriter.ChannelFactory.DEFAULT; } - /** If this {@code Translog} was closed as a side-effect of a tragic exception, - * e.g. disk full while flushing a new segment, this returns the root cause exception. - * Otherwise (no tragic exception has occurred) it returns null. */ + /** + * If this {@code Translog} was closed as a side-effect of a tragic exception, + * e.g. disk full while flushing a new segment, this returns the root cause exception. + * Otherwise (no tragic exception has occurred) it returns null. + */ public Throwable getTragicException() { return current.getTragicException(); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index 71dff6ec36e..ecc3822361c 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -27,161 +27,46 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.InputStreamDataInput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; +import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import java.io.Closeable; +import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; -import java.nio.channels.Channels; import java.nio.channels.FileChannel; -import java.nio.file.Files; import java.nio.file.Path; import java.util.concurrent.atomic.AtomicBoolean; /** - * A base class for all classes that allows reading ops from translog files + * an immutable translog filereader */ -public abstract class TranslogReader implements Closeable, Comparable { - public static final int UNKNOWN_OP_COUNT = -1; +public class TranslogReader extends BaseTranslogReader implements Closeable { private static final byte LUCENE_CODEC_HEADER_BYTE = 0x3f; private static final byte UNVERSIONED_TRANSLOG_HEADER_BYTE = 0x00; - protected final long generation; - protected final ChannelReference channelReference; - protected final FileChannel channel; + + private final int totalOperations; + protected final long length; protected final AtomicBoolean closed = new AtomicBoolean(false); - protected final long 
firstOperationOffset;

- public TranslogReader(long generation, ChannelReference channelReference, long firstOperationOffset) {
- this.generation = generation;
- this.channelReference = channelReference;
- this.channel = channelReference.getChannel();
- this.firstOperationOffset = firstOperationOffset;
- }
-
- public long getGeneration() {
- return this.generation;
- }
-
- public abstract long sizeInBytes();
-
- abstract public int totalOperations();
-
- public final long getFirstOperationOffset() {
- return firstOperationOffset;
- }
-
- public Translog.Operation read(Translog.Location location) throws IOException {
- assert location.generation == generation : "read location's translog generation [" + location.generation + "] is not [" + generation + "]";
- ByteBuffer buffer = ByteBuffer.allocate(location.size);
- try (BufferedChecksumStreamInput checksumStreamInput = checksummedStream(buffer, location.translogLocation, location.size, null)) {
- return read(checksumStreamInput);
- }
- }
-
- /** read the size of the op (i.e., number of bytes, including the op size) written at the given position */
- private final int readSize(ByteBuffer reusableBuffer, long position) {
- // read op size from disk
- assert reusableBuffer.capacity() >= 4 : "reusable buffer must have capacity >=4 when reading opSize. got [" + reusableBuffer.capacity() + "]";
- try {
- reusableBuffer.clear();
- reusableBuffer.limit(4);
- readBytes(reusableBuffer, position);
- reusableBuffer.flip();
- // Add an extra 4 to account for the operation size integer itself
- final int size = reusableBuffer.getInt() + 4;
- final long maxSize = sizeInBytes() - position;
- if (size < 0 || size > maxSize) {
- throw new TranslogCorruptedException("operation size is corrupted must be [0.." + maxSize + "] but was: " + size);
- }
-
- return size;
- } catch (IOException e) {
- throw new ElasticsearchException("unexpected exception reading from translog snapshot of " + this.channelReference.getPath(), e);
- }
- }
-
- public Translog.Snapshot newSnapshot() {
- final ByteBuffer reusableBuffer = ByteBuffer.allocate(1024);
- final int totalOperations = totalOperations();
- channelReference.incRef();
- return newReaderSnapshot(totalOperations, reusableBuffer);
+ /**
+ * Create a reader of a translog file channel. The length parameter should be consistent with totalOperations and point
+ * at the end of the last operation in this snapshot.
+ */
+ public TranslogReader(long generation, FileChannel channel, Path path, long firstOperationOffset, long length, int totalOperations) {
+ super(generation, channel, path, firstOperationOffset);
+ this.length = length;
+ this.totalOperations = totalOperations;
}

/**
- * reads an operation at the given position and returns it. The buffer length is equal to the number
- * of bytes reads.
+ * Given a file, opens a {@link TranslogReader}, taking care of checking and validating the file header.
*/
- private final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException {
- final ByteBuffer buffer;
- if (reusableBuffer.capacity() >= opSize) {
- buffer = reusableBuffer;
- } else {
- buffer = ByteBuffer.allocate(opSize);
- }
- buffer.clear();
- buffer.limit(opSize);
- readBytes(buffer, position);
- buffer.flip();
- return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse);
- }
-
- protected Translog.Operation read(BufferedChecksumStreamInput inStream) throws IOException {
- return Translog.readOperation(inStream);
- }
-
- /**
- * reads bytes at position into the given buffer, filling it.
- */
- abstract protected void readBytes(ByteBuffer buffer, long position) throws IOException;
-
- @Override
- public final void close() throws IOException {
- if (closed.compareAndSet(false, true)) {
- channelReference.decRef();
- }
- }
-
- protected final boolean isClosed() {
- return closed.get();
- }
-
- protected void ensureOpen() {
- if (isClosed()) {
- throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed");
- }
- }
-
- @Override
- public String toString() {
- return "translog [" + generation + "][" + channelReference.getPath() + "]";
- }
-
- @Override
- public int compareTo(TranslogReader o) {
- return Long.compare(getGeneration(), o.getGeneration());
- }
-
-
- /**
- * Given a file, return a VersionedTranslogStream based on an
- * optionally-existing header in the file. If the file does not exist, or
- * has zero length, returns the latest version. If the header does not
- * exist, assumes Version 0 of the translog file format.
- */
- public static ImmutableTranslogReader open(ChannelReference channelReference, Checkpoint checkpoint, String translogUUID) throws IOException {
- final FileChannel channel = channelReference.getChannel();
- final Path path = channelReference.getPath();
- assert channelReference.getGeneration() == checkpoint.generation : "expected generation: " + channelReference.getGeneration() + " but got: " + checkpoint.generation;
+ public static TranslogReader open(FileChannel channel, Path path, Checkpoint checkpoint, String translogUUID) throws IOException {
try {
- if (checkpoint.offset == 0 && checkpoint.numOps == TranslogReader.UNKNOWN_OP_COUNT) { // only old files can be empty
- return new LegacyTranslogReader(channelReference.getGeneration(), channelReference, 0);
- }
-
- InputStreamStreamInput headerStream = new InputStreamStreamInput(Channels.newInputStream(channel)); // don't close
+ InputStreamStreamInput headerStream = new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel)); // don't close
// Lucene's CodecUtil writes a magic number of 0x3FD76C17 with the
// header, in binary this looks like:
//
@@ -208,20 +93,17 @@ public abstract class TranslogReader implements Closeable, Comparable<TranslogReader>
- assert checkpoint.numOps > TranslogReader.UNKNOWN_OP_COUNT: "expected at least 0 operatin but got: " + checkpoint.numOps;
+ assert checkpoint.numOps >= 0 : "expected at least 0 operation but got: " + checkpoint.numOps;
assert checkpoint.offset <= channel.size() : "checkpoint is inconsistent with channel length: " + channel.size() + " " + checkpoint;
int len = headerStream.readInt();
if (len > channel.size()) {
@@ -232,78 +114,61 @@ public abstract class TranslogReader implements Closeable, Comparable<TranslogReader>
+ if (position >= length) {
+ throw new EOFException("read requested past EOF.
pos [" + position + "] end: [" + length + "]"); } - - @Override - public final int estimatedTotalOperations() { - return totalOperations; + if (position < firstOperationOffset) { + throw new IOException("read requested before position of first ops. pos [" + position + "] first op on: [" + firstOperationOffset + "]"); } + Channels.readFromFileChannelWithEofException(channel, position, buffer); + } - @Override - public Translog.Operation next() throws IOException { - if (readOperations < totalOperations) { - assert readOperations < totalOperations : "readOpeartions must be less than totalOperations"; - return readOperation(); - } else { - return null; - } + public Checkpoint getInfo() { + return new Checkpoint(length, totalOperations, getGeneration()); + } + + @Override + public final void close() throws IOException { + if (closed.compareAndSet(false, true)) { + channel.close(); } + } - protected final Translog.Operation readOperation() throws IOException { - final int opSize = readSize(reusableBuffer, position); - reuse = checksummedStream(reusableBuffer, position, opSize, reuse); - Translog.Operation op = read(reuse); - position += opSize; - readOperations++; - return op; - } + protected final boolean isClosed() { + return closed.get(); + } - @Override - public void close() { - if (closed.compareAndSet(false, true)) { - channelReference.decRef(); - } + protected void ensureOpen() { + if (isClosed()) { + throw new AlreadyClosedException(toString() + " is already closed"); } } } diff --git a/core/src/main/java/org/elasticsearch/index/translog/ImmutableTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java similarity index 50% rename from core/src/main/java/org/elasticsearch/index/translog/ImmutableTranslogReader.java rename to core/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java index 1d6d3b45a63..10f381f8eba 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/ImmutableTranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - package org.elasticsearch.index.translog; import org.elasticsearch.common.io.Channels; @@ -24,68 +23,82 @@ import org.elasticsearch.common.io.Channels; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Path; -/** - * a translog reader which is fixed in length - */ -public class ImmutableTranslogReader extends TranslogReader { - +public class TranslogSnapshot extends BaseTranslogReader implements Translog.Snapshot { private final int totalOperations; protected final long length; + private final ByteBuffer reusableBuffer; + private long position; + private int readOperations; + private BufferedChecksumStreamInput reuse; + + /** * Create a snapshot of translog file channel. The length parameter should be consistent with totalOperations and point * at the end of the last operation in this snapshot. 
*/ - public ImmutableTranslogReader(long generation, ChannelReference channelReference, long firstOperationOffset, long length, int totalOperations) { - super(generation, channelReference, firstOperationOffset); + public TranslogSnapshot(long generation, FileChannel channel, Path path, long firstOperationOffset, long length, int totalOperations) { + super(generation, channel, path, firstOperationOffset); this.length = length; this.totalOperations = totalOperations; + this.reusableBuffer = ByteBuffer.allocate(1024); + readOperations = 0; + position = firstOperationOffset; + reuse = null; } @Override - public final TranslogReader clone() { - if (channelReference.tryIncRef()) { - try { - ImmutableTranslogReader reader = newReader(generation, channelReference, firstOperationOffset, length, totalOperations); - channelReference.incRef(); // for the new object - return reader; - } finally { - channelReference.decRef(); - } + public final int totalOperations() { + return totalOperations; + } + + @Override + public Translog.Operation next() throws IOException { + if (readOperations < totalOperations) { + return readOperation(); } else { - throw new IllegalStateException("can't increment translog [" + generation + "] channel ref count"); + return null; } } - - protected ImmutableTranslogReader newReader(long generation, ChannelReference channelReference, long offset, long length, int totalOperations) { - return new ImmutableTranslogReader(generation, channelReference, offset, length, totalOperations); + protected final Translog.Operation readOperation() throws IOException { + final int opSize = readSize(reusableBuffer, position); + reuse = checksummedStream(reusableBuffer, position, opSize, reuse); + Translog.Operation op = read(reuse); + position += opSize; + readOperations++; + return op; } + public long sizeInBytes() { return length; } - public int totalOperations() { - return totalOperations; - } - /** * reads an operation at the given position into the given buffer. */ protected void readBytes(ByteBuffer buffer, long position) throws IOException { if (position >= length) { - throw new EOFException("read requested past EOF. pos [" + position + "] end: [" + length + "]"); + throw new EOFException("read requested past EOF. pos [" + position + "] end: [" + length + "], generation: [" + getGeneration() + "], path: [" + path + "]"); } - if (position < firstOperationOffset) { - throw new IOException("read requested before position of first ops. pos [" + position + "] first op on: [" + firstOperationOffset + "]"); + if (position < getFirstOperationOffset()) { + throw new IOException("read requested before position of first ops. 
pos [" + position + "] first op on: [" + getFirstOperationOffset() + "], generation: [" + getGeneration() + "], path: [" + path + "]"); } Channels.readFromFileChannelWithEofException(channel, position, buffer); } - public Checkpoint getInfo() { - return new Checkpoint(length, totalOperations, getGeneration()); + @Override + public String toString() { + return "TranslogSnapshot{" + + "readOperations=" + readOperations + + ", position=" + position + + ", totalOperations=" + totalOperations + + ", length=" + length + + ", reusableBuffer=" + reusableBuffer + + '}'; } - -} +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 517e4a5b30e..f7d0cd571e8 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -25,24 +25,23 @@ import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.index.shard.ShardId; import java.io.BufferedOutputStream; +import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; -import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.concurrent.atomic.AtomicBoolean; -public class TranslogWriter extends TranslogReader { +public class TranslogWriter extends BaseTranslogReader implements Closeable { public static final String TRANSLOG_CODEC = "translog"; public static final int VERSION_CHECKSUMS = 1; @@ -61,11 +60,14 @@ public class TranslogWriter extends TranslogReader { /* the total offset of this file including the bytes written to the file as well as into the buffer */ private volatile long totalOffset; - public TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, ByteSizeValue bufferSize) throws IOException { - super(generation, channelReference, channelReference.getChannel().position()); + protected final AtomicBoolean closed = new AtomicBoolean(false); + + + public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException { + super(generation, channel, path, channel.position()); this.shardId = shardId; - this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channelReference.getChannel()), bufferSize.bytesAsInt()); - this.lastSyncedOffset = channelReference.getChannel().position(); + this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channel), bufferSize.bytesAsInt()); + this.lastSyncedOffset = channel.position(); totalOffset = lastSyncedOffset; } @@ -74,10 +76,10 @@ public class TranslogWriter extends TranslogReader { } private static int getHeaderLength(int uuidLength) { - return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT; + return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT; } - public 
static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback onClose, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { + public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { final BytesRef ref = new BytesRef(translogUUID); final int headerLength = getHeaderLength(ref.length); final FileChannel channel = channelFactory.open(file); @@ -90,7 +92,7 @@ public class TranslogWriter extends TranslogReader { out.writeBytes(ref.bytes, ref.offset, ref.length); channel.force(true); writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE); - final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize); + final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, channel, file, bufferSize); return writer; } catch (Throwable throwable) { // if we fail to bake the file-generation into the checkpoint we stick with the file and once we recover and that @@ -99,9 +101,12 @@ public class TranslogWriter extends TranslogReader { throw throwable; } } - /** If this {@code TranslogWriter} was closed as a side-effect of a tragic exception, - * e.g. disk full while flushing a new segment, this returns the root cause exception. - * Otherwise (no tragic exception has occurred) it returns null. */ + + /** + * If this {@code TranslogWriter} was closed as a side-effect of a tragic exception, + * e.g. disk full while flushing a new segment, this returns the root cause exception. + * Otherwise (no tragic exception has occurred) it returns null. + */ public Throwable getTragicException() { return tragedy; } @@ -110,7 +115,9 @@ public class TranslogWriter extends TranslogReader { assert throwable != null : "throwable must not be null in a tragic event"; if (tragedy == null) { tragedy = throwable; - } else { + } else if (tragedy != throwable) { + // it should be safe to call closeWithTragicEvents on multiple layers without + // worrying about self suppression. tragedy.addSuppressed(throwable); } close(); @@ -134,29 +141,27 @@ public class TranslogWriter extends TranslogReader { } /** - * write all buffered ops to disk and fsync file + * write all buffered ops to disk and fsync file. + * + * Note: any exception during the sync process will be interpreted as a tragic exception and the writer will be closed before + * raising the exception. 
*/
public void sync() throws IOException {
if (syncNeeded()) {
synchronized (this) {
- ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event
- channelReference.incRef();
+ ensureOpen();
+ final long offsetToSync;
+ final int opsCounter;
try {
- final long offsetToSync;
- final int opsCounter;
outputStream.flush();
offsetToSync = totalOffset;
opsCounter = operationCounter;
- try {
- checkpoint(offsetToSync, opsCounter, channelReference);
- } catch (Throwable ex) {
- closeWithTragicEvent(ex);
- throw ex;
- }
- lastSyncedOffset = offsetToSync;
- } finally {
- channelReference.decRef();
+ checkpoint(offsetToSync, opsCounter, generation, channel, path);
+ } catch (Throwable ex) {
+ closeWithTragicEvent(ex);
+ throw ex;
}
+ lastSyncedOffset = offsetToSync;
}
}
}
@@ -177,76 +182,36 @@
}

/**
- * returns a new reader that follows the current writes (most importantly allows making
- * repeated snapshots that includes new content)
+ * closes this writer and transfers its underlying file channel to a new immutable reader
*/
- public TranslogReader newReaderFromWriter() {
- ensureOpen();
- channelReference.incRef();
- boolean success = false;
+ public synchronized TranslogReader closeIntoReader() throws IOException {
try {
- final TranslogReader reader = new InnerReader(this.generation, firstOperationOffset, channelReference);
- success = true;
- return reader;
- } finally {
- if (!success) {
- channelReference.decRef();
- }
+ sync(); // sync before we close
+ } catch (IOException e) {
+ closeWithTragicEvent(e);
+ throw e;
}
- }
-
- /**
- * returns a new immutable reader which only exposes the current written operation
- */
- public ImmutableTranslogReader immutableReader() throws TranslogException {
- if (channelReference.tryIncRef()) {
- synchronized (this) {
- try {
- ensureOpen();
- outputStream.flush();
- ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, getWrittenOffset(), operationCounter);
- channelReference.incRef(); // for new reader
- return reader;
- } catch (Exception e) {
- throw new TranslogException(shardId, "exception while creating an immutable reader", e);
- } finally {
- channelReference.decRef();
- }
- }
+ if (closed.compareAndSet(false, true)) {
+ return new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter);
} else {
- throw new TranslogException(shardId, "can't increment channel [" + channelReference + "] ref count");
+ throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "])", tragedy);
}
}

+ @Override
+ public synchronized Translog.Snapshot newSnapshot() {
+ ensureOpen();
+ try {
+ sync();
+ } catch (IOException e) {
+ throw new TranslogException(shardId, "exception while syncing before creating a snapshot", e);
+ }
+ return super.newSnapshot();
+ }
+
private long getWrittenOffset() throws IOException {
- return channelReference.getChannel().position();
- }
-
- /**
- * this class is used when one wants a reference to this file which exposes all recently written operation.
- * as such it needs access to the internals of the current reader - */ - final class InnerReader extends TranslogReader { - - public InnerReader(long generation, long fistOperationOffset, ChannelReference channelReference) { - super(generation, channelReference, fistOperationOffset); - } - - @Override - public long sizeInBytes() { - return TranslogWriter.this.sizeInBytes(); - } - - @Override - public int totalOperations() { - return TranslogWriter.this.totalOperations(); - } - - @Override - protected void readBytes(ByteBuffer buffer, long position) throws IOException { - TranslogWriter.this.readBytes(buffer, position); - } + return channel.position(); } /** @@ -264,13 +229,13 @@ public class TranslogWriter extends TranslogReader { @Override protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - if (position+targetBuffer.remaining() > getWrittenOffset()) { + if (position + targetBuffer.remaining() > getWrittenOffset()) { synchronized (this) { // we only flush here if it's really really needed - try to minimize the impact of the read operation // in some cases ie. a tragic event we might still be able to read the relevant value // which is not really important in production but some test can make most strict assumptions // if we don't fail in this call unless absolutely necessary. - if (position+targetBuffer.remaining() > getWrittenOffset()) { + if (position + targetBuffer.remaining() > getWrittenOffset()) { outputStream.flush(); } } @@ -280,9 +245,9 @@ public class TranslogWriter extends TranslogReader { Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } - private synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException { - channelReference.getChannel().force(false); - writeCheckpoint(lastSyncPosition, operationCounter, channelReference.getPath().getParent(), channelReference.getGeneration(), StandardOpenOption.WRITE); + private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException { + translogFileChannel.force(false); + writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE); } private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... 
options) throws IOException { @@ -307,6 +272,17 @@ public class TranslogWriter extends TranslogReader { } } + @Override + public final void close() throws IOException { + if (closed.compareAndSet(false, true)) { + channel.close(); + } + } + + protected final boolean isClosed() { + return closed.get(); + } + private final class BufferedChannelOutputStream extends BufferedOutputStream { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index faf7f73dc23..907a64f8175 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -22,9 +22,7 @@ package org.elasticsearch.indices; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -57,52 +55,6 @@ import org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.query.BoolQueryParser; -import org.elasticsearch.index.query.BoostingQueryParser; -import org.elasticsearch.index.query.CommonTermsQueryParser; -import org.elasticsearch.index.query.ConstantScoreQueryParser; -import org.elasticsearch.index.query.DisMaxQueryParser; -import org.elasticsearch.index.query.ExistsQueryParser; -import org.elasticsearch.index.query.FieldMaskingSpanQueryParser; -import org.elasticsearch.index.query.FuzzyQueryParser; -import org.elasticsearch.index.query.GeoBoundingBoxQueryParser; -import org.elasticsearch.index.query.GeoDistanceQueryParser; -import org.elasticsearch.index.query.GeoDistanceRangeQueryParser; -import org.elasticsearch.index.query.GeoPolygonQueryParser; -import org.elasticsearch.index.query.GeoShapeQueryParser; -import org.elasticsearch.index.query.GeohashCellQuery; -import org.elasticsearch.index.query.HasChildQueryParser; -import org.elasticsearch.index.query.HasParentQueryParser; -import org.elasticsearch.index.query.IdsQueryParser; -import org.elasticsearch.index.query.IndicesQueryParser; -import org.elasticsearch.index.query.MatchAllQueryParser; -import org.elasticsearch.index.query.MatchNoneQueryParser; -import org.elasticsearch.index.query.MatchQueryParser; -import org.elasticsearch.index.query.MoreLikeThisQueryParser; -import org.elasticsearch.index.query.MultiMatchQueryParser; -import org.elasticsearch.index.query.NestedQueryParser; -import org.elasticsearch.index.query.PrefixQueryParser; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.index.query.QueryStringQueryParser; -import org.elasticsearch.index.query.RangeQueryParser; -import org.elasticsearch.index.query.RegexpQueryParser; -import org.elasticsearch.index.query.ScriptQueryParser; -import org.elasticsearch.index.query.SimpleQueryStringParser; -import org.elasticsearch.index.query.SpanContainingQueryParser; -import org.elasticsearch.index.query.SpanFirstQueryParser; -import 
org.elasticsearch.index.query.SpanMultiTermQueryParser; -import org.elasticsearch.index.query.SpanNearQueryParser; -import org.elasticsearch.index.query.SpanNotQueryParser; -import org.elasticsearch.index.query.SpanOrQueryParser; -import org.elasticsearch.index.query.SpanTermQueryParser; -import org.elasticsearch.index.query.SpanWithinQueryParser; -import org.elasticsearch.index.query.TemplateQueryParser; -import org.elasticsearch.index.query.TermQueryParser; -import org.elasticsearch.index.query.TermsQueryParser; -import org.elasticsearch.index.query.TypeQueryParser; -import org.elasticsearch.index.query.WildcardQueryParser; -import org.elasticsearch.index.query.WrapperQueryParser; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.cache.request.IndicesRequestCache; @@ -111,7 +63,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener; import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.RecoverySource; import org.elasticsearch.indices.recovery.RecoveryTarget; @@ -127,10 +78,6 @@ import java.util.Map; */ public class IndicesModule extends AbstractModule { - - private final ExtensionPoint.ClassSet queryParsers - = new ExtensionPoint.ClassSet<>("query_parser", QueryParser.class); - private final Map mapperParsers = new LinkedHashMap<>(); // Use a LinkedHashMap for metadataMappers because iteration order matters @@ -138,62 +85,10 @@ public class IndicesModule extends AbstractModule { = new LinkedHashMap<>(); public IndicesModule() { - registerBuiltinQueryParsers(); registerBuiltInMappers(); registerBuiltInMetadataMappers(); } - private void registerBuiltinQueryParsers() { - registerQueryParser(MatchQueryParser.class); - registerQueryParser(MultiMatchQueryParser.class); - registerQueryParser(NestedQueryParser.class); - registerQueryParser(HasChildQueryParser.class); - registerQueryParser(HasParentQueryParser.class); - registerQueryParser(DisMaxQueryParser.class); - registerQueryParser(IdsQueryParser.class); - registerQueryParser(MatchAllQueryParser.class); - registerQueryParser(QueryStringQueryParser.class); - registerQueryParser(BoostingQueryParser.class); - registerQueryParser(BoolQueryParser.class); - registerQueryParser(TermQueryParser.class); - registerQueryParser(TermsQueryParser.class); - registerQueryParser(FuzzyQueryParser.class); - registerQueryParser(RegexpQueryParser.class); - registerQueryParser(RangeQueryParser.class); - registerQueryParser(PrefixQueryParser.class); - registerQueryParser(WildcardQueryParser.class); - registerQueryParser(ConstantScoreQueryParser.class); - registerQueryParser(SpanTermQueryParser.class); - registerQueryParser(SpanNotQueryParser.class); - registerQueryParser(SpanWithinQueryParser.class); - registerQueryParser(SpanContainingQueryParser.class); - registerQueryParser(FieldMaskingSpanQueryParser.class); - registerQueryParser(SpanFirstQueryParser.class); - registerQueryParser(SpanNearQueryParser.class); - registerQueryParser(SpanOrQueryParser.class); - registerQueryParser(MoreLikeThisQueryParser.class); - 
registerQueryParser(WrapperQueryParser.class); - registerQueryParser(IndicesQueryParser.class); - registerQueryParser(CommonTermsQueryParser.class); - registerQueryParser(SpanMultiTermQueryParser.class); - registerQueryParser(FunctionScoreQueryParser.class); - registerQueryParser(SimpleQueryStringParser.class); - registerQueryParser(TemplateQueryParser.class); - registerQueryParser(TypeQueryParser.class); - registerQueryParser(ScriptQueryParser.class); - registerQueryParser(GeoDistanceQueryParser.class); - registerQueryParser(GeoDistanceRangeQueryParser.class); - registerQueryParser(GeoBoundingBoxQueryParser.class); - registerQueryParser(GeohashCellQuery.Parser.class); - registerQueryParser(GeoPolygonQueryParser.class); - registerQueryParser(ExistsQueryParser.class); - registerQueryParser(MatchNoneQueryParser.class); - - if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { - registerQueryParser(GeoShapeQueryParser.class); - } - } - private void registerBuiltInMappers() { registerMapper(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser()); registerMapper(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser()); @@ -237,10 +132,6 @@ public class IndicesModule extends AbstractModule { // last so that it can see all other mappers, including those coming from plugins } - public void registerQueryParser(Class queryParser) { - queryParsers.registerExtension(queryParser); - } - /** * Register a mapper for the given type. */ @@ -263,7 +154,6 @@ public class IndicesModule extends AbstractModule { @Override protected void configure() { - bindQueryParsersExtension(); bindMapperExtension(); bind(IndicesService.class).asEagerSingleton(); @@ -284,7 +174,6 @@ public class IndicesModule extends AbstractModule { bind(IndicesFieldDataCacheListener.class).asEagerSingleton(); bind(TermVectorsService.class).asEagerSingleton(); bind(NodeServicesProvider.class).asEagerSingleton(); - bind(ShapeBuilderRegistry.class).asEagerSingleton(); } // public for testing @@ -303,9 +192,4 @@ public class IndicesModule extends AbstractModule { protected void bindMapperExtension() { bind(MapperRegistry.class).toInstance(getMapperRegistry()); } - - protected void bindQueryParsersExtension() { - queryParsers.bind(binder()); - bind(IndicesQueriesRegistry.class).asEagerSingleton(); - } } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index c3c0ffe4732..fdc448989d5 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -70,7 +71,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.nio.file.Files; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -101,6 +101,7 @@ public class IndicesService extends AbstractLifecycleComponent i private final IndicesQueriesRegistry indicesQueriesRegistry; private final 
ClusterService clusterService; private final IndexNameExpressionResolver indexNameExpressionResolver; + private final IndexScopedSettings indexScopeSetting; private volatile Map indices = emptyMap(); private final Map> pendingDeletes = new HashMap<>(); private final OldShardsStats oldShardsStats = new OldShardsStats(); @@ -116,7 +117,7 @@ public class IndicesService extends AbstractLifecycleComponent i public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, MapperRegistry mapperRegistry, ThreadPool threadPool) { + ClusterService clusterService, MapperRegistry mapperRegistry, ThreadPool threadPool, IndexScopedSettings indexScopedSettings) { super(settings); this.pluginsService = pluginsService; this.nodeEnv = nodeEnv; @@ -130,6 +131,7 @@ public class IndicesService extends AbstractLifecycleComponent i clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType); clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, indexStoreConfig::setRateLimitingThrottle); indexingMemoryController = new IndexingMemoryController(settings, threadPool, this); + this.indexScopeSetting = indexScopedSettings; } @Override @@ -280,7 +282,7 @@ public class IndicesService extends AbstractLifecycleComponent i } final String indexName = indexMetaData.getIndex(); final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state()); - final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList(), indexNameMatcher); + final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); Index index = new Index(indexMetaData.getIndex()); if (indices.containsKey(index.name())) { throw new IndexAlreadyExistsException(index); @@ -570,7 +572,7 @@ public class IndicesService extends AbstractLifecycleComponent i // play safe here and make sure that we take node level settings into account. // we might run on nodes where we use shard FS and then in the future don't delete // actual content. 
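Note on the IndexSettings changes in this hunk: the explicit Collections.emptyList() of update consumers is gone, and IndexSettings is now built against the node-wide IndexScopedSettings registry instead. A minimal sketch of the Setting idiom this patch standardizes on, using only calls that appear elsewhere in this diff; the "index.example.enabled" key is hypothetical:

    // Hypothetical setting: key, default value (true), dynamic flag (true) and scope
    // are all declared once on the Setting itself.
    public static final Setting<Boolean> EXAMPLE_ENABLED_SETTING =
            Setting.boolSetting("index.example.enabled", true, true, Setting.Scope.INDEX);

    // Typed read; the declared default is the fallback when the key is absent.
    boolean enabled = EXAMPLE_ENABLED_SETTING.get(indexMetaData.getSettings());

    // Dynamic updates are wired through the settings service, as IndicesService does above:
    clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING,
            indexStoreConfig::setRateLimitingThrottle);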
- return new IndexSettings(metaData, settings, Collections.emptyList()); + return new IndexSettings(metaData, settings); } /** diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java index 8c3ba6dc47d..c3364de9d5b 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java @@ -39,8 +39,6 @@ import java.util.concurrent.TimeUnit; */ public final class IndicesWarmer extends AbstractComponent { - public static final String INDEX_WARMER_ENABLED = "index.warmer.enabled"; - private final ThreadPool threadPool; private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); @@ -62,8 +60,7 @@ public final class IndicesWarmer extends AbstractComponent { if (shard.state() == IndexShardState.CLOSED) { return; } - final Settings indexSettings = settings.getSettings(); - if (!indexSettings.getAsBoolean(INDEX_WARMER_ENABLED, settings.getNodeSettings().getAsBoolean(INDEX_WARMER_ENABLED, true))) { + if (settings.isWarmerEnabled() == false) { return; } if (logger.isTraceEnabled()) { diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 7b7fca4b37b..e73396fcd7f 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -78,7 +78,7 @@ public final class AnalysisModule extends AbstractModule { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("_na_").settings(build).build(); - NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY); } private static final IndexSettings NA_INDEX_SETTINGS; private final Environment environment; diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java index a00cc7e787c..6a252178d4d 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -78,7 +79,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis * A setting to enable or disable request caching on an index level. Its dynamic by default * since we are checking on the cluster state IndexMetaData always. 
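The request-cache conversion just below replaces a bare string key with a typed, index-scoped Setting, and the read side changes accordingly. A before/after sketch drawn from this same hunk (defaultEnable is the old per-call default):

    // Before: raw key, the caller supplies a default on every read:
    //   boolean cached = settings.getAsBoolean("index.requests.cache.enable", defaultEnable);
    // After: the Setting owns key, default, dynamic flag and scope:
    boolean cached = INDEX_CACHE_REQUEST_ENABLED_SETTING.get(index.getSettings());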
*/ - public static final String INDEX_CACHE_REQUEST_ENABLED = "index.requests.cache.enable"; + public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", true, true, Setting.Scope.INDEX); public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval"; public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size"; @@ -118,9 +119,6 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis threadPool.schedule(cleanInterval, ThreadPool.Names.SAME, reaper); } - private boolean isCacheEnabled(Settings settings, boolean defaultEnable) { - return settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, defaultEnable); - } private void buildCache() { long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size, INDICES_CACHE_QUERY_SIZE).bytes(); @@ -182,7 +180,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis } // if not explicitly set in the request, use the index setting, if not, use the request if (request.requestCache() == null) { - if (!isCacheEnabled(index.getSettings(), Boolean.FALSE)) { + if (INDEX_CACHE_REQUEST_ENABLED_SETTING.get(index.getSettings()) == false) { return false; } } else if (!request.requestCache()) { diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 9357de7b1eb..49068eec002 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -257,7 +257,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { try { if (indexShard.recoverFromStore(nodes.localNode())) { - shardStateAction.shardStarted(state, shardRouting, indexMetaData.getIndexUUID(), "after recovery from store"); + shardStateAction.shardStarted(shardRouting, indexMetaData.getIndexUUID(), "after recovery from store", SHARD_STATE_ACTION_LISTENER); } } catch (Throwable t) { handleRecoveryFailure(indexService, shardRouting, true, t); @@ -664,7 +664,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent> queryParsers; - @Inject - public IndicesQueriesRegistry(Settings settings, Set injectedQueryParsers, NamedWriteableRegistry namedWriteableRegistry) { + public IndicesQueriesRegistry(Settings settings, Map> queryParsers) { super(settings); - Map> queryParsers = new HashMap<>(); - for (@SuppressWarnings("unchecked") QueryParser queryParser : injectedQueryParsers) { - for (String name : queryParser.names()) { - queryParsers.put(name, queryParser); - } - @SuppressWarnings("unchecked") NamedWriteable qb = queryParser.getBuilderPrototype(); - namedWriteableRegistry.registerPrototype(QueryBuilder.class, qb); - } - // EmptyQueryBuilder is not registered as query parser but used internally. 
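The IndicesQueriesRegistry hunk above drops Guice injection of a parser set, along with the registry's own NamedWriteableRegistry bookkeeping, in favor of receiving a finished name-to-parser map. A sketch of the new construction path, mirroring what buildQueryParserRegistry() does later in this patch; MatchQueryParser stands in for any parser:

    Map<String, QueryParser<?>> parsers = new HashMap<>();
    QueryParser<?> parser = new MatchQueryParser();
    for (String name : parser.names()) {
        parsers.put(name, parser); // every alias maps to the same parser instance
    }
    IndicesQueriesRegistry registry = new IndicesQueriesRegistry(settings, parsers);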
- // We need to register it with the NamedWriteableRegistry in order to serialize it - namedWriteableRegistry.registerPrototype(QueryBuilder.class, EmptyQueryBuilder.PROTOTYPE); - this.queryParsers = unmodifiableMap(queryParsers); + this.queryParsers = queryParsers; } /** diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index c0270e71721..1ef9215b7b4 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -37,6 +37,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportService; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -82,7 +83,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe } } - private RecoveryResponse recover(final StartRecoveryRequest request) { + private RecoveryResponse recover(final StartRecoveryRequest request) throws IOException { final IndexService indexService = indicesService.indexServiceSafe(request.shardId().index().name()); final IndexShard shard = indexService.getShard(request.shardId().id()); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 94c78efccd8..4699e8d5ace 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -120,7 +120,7 @@ public class RecoverySourceHandler { /** * performs the recovery from the local engine to the target */ - public RecoveryResponse recoverToTarget() { + public RecoveryResponse recoverToTarget() throws IOException { try (Translog.View translogView = shard.acquireTranslogView()) { logger.trace("captured translog id [{}] for recovery", translogView.minTranslogGeneration()); final IndexCommit phase1Snapshot; @@ -144,8 +144,8 @@ public class RecoverySourceHandler { } logger.trace("snapshot translog for recovery. current size is [{}]", translogView.totalOperations()); - try (Translog.Snapshot phase2Snapshot = translogView.snapshot()) { - phase2(phase2Snapshot); + try { + phase2(translogView.snapshot()); } catch (Throwable e) { throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e); } @@ -308,7 +308,7 @@ public class RecoverySourceHandler { }); } - prepareTargetForTranslog(translogView); + prepareTargetForTranslog(translogView.totalOperations()); logger.trace("[{}][{}] recovery [phase1] to {}: took [{}]", indexName, shardId, request.targetNode(), stopWatch.totalTime()); response.phase1Time = stopWatch.totalTime().millis(); @@ -320,8 +320,7 @@ public class RecoverySourceHandler { } - - protected void prepareTargetForTranslog(final Translog.View translogView) { + protected void prepareTargetForTranslog(final int totalTranslogOps) { StopWatch stopWatch = new StopWatch().start(); logger.trace("{} recovery [phase1] to {}: prepare remote engine for translog", request.shardId(), request.targetNode()); final long startEngineStart = stopWatch.totalTime().millis(); @@ -332,7 +331,7 @@ public class RecoverySourceHandler { // operations. This ensures the shard engine is started and disables // garbage collection (not the JVM's GC!) 
of tombstone deletes transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.PREPARE_TRANSLOG, - new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId(), translogView.totalOperations()), + new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId(), totalTranslogOps), TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); } }); @@ -463,14 +462,14 @@ public class RecoverySourceHandler { // make sense to re-enable throttling in this phase cancellableThreads.execute(() -> { final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( - request.recoveryId(), request.shardId(), operations, snapshot.estimatedTotalOperations()); + request.recoveryId(), request.shardId(), operations, snapshot.totalOperations()); transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, recoveryOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); }); if (logger.isTraceEnabled()) { logger.trace("[{}][{}] sent batch of [{}][{}] (total: [{}]) translog operations to {}", indexName, shardId, ops, new ByteSizeValue(size), - snapshot.estimatedTotalOperations(), + snapshot.totalOperations(), request.targetNode()); } @@ -488,7 +487,7 @@ public class RecoverySourceHandler { if (!operations.isEmpty()) { cancellableThreads.execute(() -> { RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( - request.recoveryId(), request.shardId(), operations, snapshot.estimatedTotalOperations()); + request.recoveryId(), request.shardId(), operations, snapshot.totalOperations()); transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, recoveryOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); }); @@ -497,7 +496,7 @@ public class RecoverySourceHandler { if (logger.isTraceEnabled()) { logger.trace("[{}][{}] sent final batch of [{}][{}] (total: [{}]) translog operations to {}", indexName, shardId, ops, new ByteSizeValue(size), - snapshot.estimatedTotalOperations(), + snapshot.totalOperations(), request.targetNode()); } return totalOperations; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java index e849580b2c4..16bd1d46553 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java @@ -58,7 +58,7 @@ public class SharedFSRecoverySourceHandler extends RecoverySourceHandler { shard.failShard("failed to close engine (phase1)", e); } } - prepareTargetForTranslog(Translog.View.EMPTY_VIEW); + prepareTargetForTranslog(0); finalizeRecovery(); return response; } catch (Throwable t) { diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 4a76d262130..dc483932fec 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -111,7 +111,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe } for (IndexRoutingTable indexRoutingTable : 
event.state().routingTable()) { - IndexSettings indexSettings = new IndexSettings(event.state().getMetaData().index(indexRoutingTable.index()), settings, Collections.emptyList()); + IndexSettings indexSettings = new IndexSettings(event.state().getMetaData().index(indexRoutingTable.index()), settings); // Note, closed indices will not have any routing information, so won't be deleted for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) { diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 65902b443e9..6a6b05c4ad1 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -171,11 +172,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction { public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); - public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge"; private final ClusterService clusterService; private final IndicesService indicesService; @@ -164,8 +163,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent headers) { + private static void copyHeadersAndContext(ActionRequest actionRequest, RestRequest restRequest, Set headers) { for (String usefulHeader : headers) { String headerValue = restRequest.header(usefulHeader); if (headerValue != null) { @@ -82,7 +82,8 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, ActionListener listener) { copyHeadersAndContext(request, restRequest, headers); super.doExecute(action, request, listener); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index fc4432a658f..5acbfc48d24 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -58,7 +58,7 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest() .routingTable(false) .nodes(false); - final boolean renderDefaults = request.paramAsBoolean("defaults", false); + final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); clusterStateRequest.local(request.paramAsBoolean("local", 
clusterStateRequest.local())); client.admin().cluster().state(clusterStateRequest, new RestBuilderListener(channel) { @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java index b7371f7b80e..e23dec0f0bc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,9 +53,12 @@ import static org.elasticsearch.rest.RestStatus.OK; */ public class RestGetIndicesAction extends BaseRestHandler { + private final IndexScopedSettings indexScopedSettings; + @Inject - public RestGetIndicesAction(Settings settings, RestController controller, Client client) { + public RestGetIndicesAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) { super(settings, controller, client); + this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}", this); controller.registerHandler(GET, "/{index}/{type}", this); } @@ -133,9 +137,15 @@ public class RestGetIndicesAction extends BaseRestHandler { } private void writeSettings(Settings settings, XContentBuilder builder, Params params) throws IOException { + final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); builder.startObject(Fields.SETTINGS); settings.toXContent(builder, params); builder.endObject(); + if (renderDefaults) { + builder.startObject("defaults"); + indexScopedSettings.diff(settings, settings).toXContent(builder, request); + builder.endObject(); + } } }); @@ -145,7 +155,6 @@ public class RestGetIndicesAction extends BaseRestHandler { static final XContentBuilderString ALIASES = new XContentBuilderString("aliases"); static final XContentBuilderString MAPPINGS = new XContentBuilderString("mappings"); static final XContentBuilderString SETTINGS = new XContentBuilderString("settings"); - static final XContentBuilderString WARMERS = new XContentBuilderString("warmers"); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java index f27897aa731..b924acc5fb3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java @@ -26,9 +26,9 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.BaseRestHandler; 
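RestGetIndicesAction above and RestGetSettingsAction below gain the same include_defaults behavior, both backed by IndexScopedSettings.diff. A condensed sketch of the shared shape; indexSettings and nodeSettings stand in for the concrete Settings instances each handler passes:

    boolean renderDefaults = request.paramAsBoolean("include_defaults", false);
    if (renderDefaults) {
        builder.startObject("defaults");
        // diff(...) renders the registered index-scoped settings not explicitly set on the index.
        indexScopedSettings.diff(indexSettings, nodeSettings).toXContent(builder, request);
        builder.endObject();
    }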
import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -42,9 +42,12 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestGetSettingsAction extends BaseRestHandler { + private final IndexScopedSettings indexScopedSettings; + @Inject - public RestGetSettingsAction(Settings settings, RestController controller, Client client) { + public RestGetSettingsAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) { super(settings, controller, client); + this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}/_settings/{name}", this); controller.registerHandler(GET, "/_settings/{name}", this); controller.registerHandler(GET, "/{index}/_setting/{name}", this); @@ -53,6 +56,7 @@ public class RestGetSettingsAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { final String[] names = request.paramAsStringArrayOrEmptyIfAll("name"); + final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); GetSettingsRequest getSettingsRequest = new GetSettingsRequest() .indices(Strings.splitStringByCommaToArray(request.param("index"))) .indicesOptions(IndicesOptions.fromRequest(request, IndicesOptions.strictExpandOpen())) @@ -71,9 +75,14 @@ public class RestGetSettingsAction extends BaseRestHandler { continue; } builder.startObject(cursor.key, XContentBuilder.FieldCaseConversion.NONE); - builder.startObject(Fields.SETTINGS); + builder.startObject("settings"); cursor.value.toXContent(builder, request); builder.endObject(); + if (renderDefaults) { + builder.startObject("defaults"); + indexScopedSettings.diff(cursor.value, settings).toXContent(builder, request); + builder.endObject(); + } builder.endObject(); } builder.endObject(); @@ -81,8 +90,4 @@ public class RestGetSettingsAction extends BaseRestHandler { } }); } - - static class Fields { - static final XContentBuilderString SETTINGS = new XContentBuilderString("settings"); - } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 7bbc5b9eff9..c7eab8ca14b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -266,9 +266,9 @@ public class RestNodesAction extends AbstractCatAction { table.addCell(osStats == null ? null : Short.toString(osStats.getCpu().getPercent())); boolean hasLoadAverage = osStats != null && osStats.getCpu().getLoadAverage() != null; - table.addCell(!hasLoadAverage ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[0])); - table.addCell(!hasLoadAverage ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[1])); - table.addCell(!hasLoadAverage ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[2])); + table.addCell(!hasLoadAverage || osStats.getCpu().getLoadAverage()[0] == -1 ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[0])); + table.addCell(!hasLoadAverage || osStats.getCpu().getLoadAverage()[1] == -1 ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[1])); + table.addCell(!hasLoadAverage || osStats.getCpu().getLoadAverage()[2] == -1 ? 
null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[2])); table.addCell(jvmStats == null ? null : jvmStats.getUptime()); table.addCell(node.clientNode() ? "c" : node.dataNode() ? "d" : "-"); table.addCell(masterId == null ? "x" : masterId.equals(node.id()) ? "*" : node.masterNode() ? "m" : "-"); diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 6f16d13bd92..e5c3e90739e 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -19,10 +19,89 @@ package org.elasticsearch.search; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import org.apache.lucene.search.BooleanQuery; +import org.elasticsearch.common.geo.ShapesAvailability; +import org.elasticsearch.common.geo.builders.CircleBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; +import org.elasticsearch.common.geo.builders.LineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiLineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiPointBuilder; +import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; +import org.elasticsearch.common.geo.builders.PolygonBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.BoolQueryParser; +import org.elasticsearch.index.query.BoostingQueryParser; +import org.elasticsearch.index.query.CommonTermsQueryParser; +import org.elasticsearch.index.query.ConstantScoreQueryParser; +import org.elasticsearch.index.query.DisMaxQueryParser; +import org.elasticsearch.index.query.EmptyQueryBuilder; +import org.elasticsearch.index.query.ExistsQueryParser; +import org.elasticsearch.index.query.FieldMaskingSpanQueryParser; +import org.elasticsearch.index.query.FuzzyQueryParser; +import org.elasticsearch.index.query.GeoBoundingBoxQueryParser; +import org.elasticsearch.index.query.GeoDistanceQueryParser; +import org.elasticsearch.index.query.GeoDistanceRangeQueryParser; +import org.elasticsearch.index.query.GeoPolygonQueryParser; +import org.elasticsearch.index.query.GeoShapeQueryParser; +import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.HasChildQueryParser; +import org.elasticsearch.index.query.HasParentQueryParser; +import org.elasticsearch.index.query.IdsQueryParser; +import org.elasticsearch.index.query.IndicesQueryParser; +import org.elasticsearch.index.query.MatchAllQueryParser; +import org.elasticsearch.index.query.MatchNoneQueryParser; +import org.elasticsearch.index.query.MatchQueryParser; +import org.elasticsearch.index.query.MoreLikeThisQueryParser; +import org.elasticsearch.index.query.MultiMatchQueryParser; +import org.elasticsearch.index.query.NestedQueryParser; +import org.elasticsearch.index.query.PrefixQueryParser; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParser; +import 
org.elasticsearch.index.query.QueryStringQueryParser; +import org.elasticsearch.index.query.RangeQueryParser; +import org.elasticsearch.index.query.RegexpQueryParser; +import org.elasticsearch.index.query.ScriptQueryParser; +import org.elasticsearch.index.query.SimpleQueryStringParser; +import org.elasticsearch.index.query.SpanContainingQueryParser; +import org.elasticsearch.index.query.SpanFirstQueryParser; +import org.elasticsearch.index.query.SpanMultiTermQueryParser; +import org.elasticsearch.index.query.SpanNearQueryParser; +import org.elasticsearch.index.query.SpanNotQueryParser; +import org.elasticsearch.index.query.SpanOrQueryParser; +import org.elasticsearch.index.query.SpanTermQueryParser; +import org.elasticsearch.index.query.SpanWithinQueryParser; +import org.elasticsearch.index.query.TemplateQueryParser; +import org.elasticsearch.index.query.TermQueryParser; +import org.elasticsearch.index.query.TermsQueryParser; +import org.elasticsearch.index.query.TypeQueryParser; +import org.elasticsearch.index.query.WildcardQueryParser; +import org.elasticsearch.index.query.WrapperQueryParser; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; +import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser; +import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser; +import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; +import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionParser; +import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionParser; +import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionParser; +import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.action.SearchServiceTransportAction; import org.elasticsearch.search.aggregations.AggregationParseElement; import org.elasticsearch.search.aggregations.AggregationPhase; @@ -149,9 +228,6 @@ import org.elasticsearch.search.query.QueryPhase; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; -import java.util.HashSet; -import java.util.Set; - /** * */ @@ -161,14 +237,35 @@ public class SearchModule extends AbstractModule { private final Set> pipelineAggParsers = new HashSet<>(); private final Highlighters highlighters = new Highlighters(); private final Suggesters suggesters = new Suggesters(); - private final Set> functionScoreParsers = new HashSet<>(); + /** + * Function score parsers constructed on registration. This is ok because + * they don't have any dependencies. + */ + private final Map> functionScoreParsers = new HashMap<>(); + /** + * Query parsers constructed at configure time. These have to be constructed + * at configure time because they depend on things that are registered by + * plugins (function score parsers). 
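These two registry comments capture the ordering constraint the SearchModule refactoring works around: score function parsers are dependency-free and can be instantiated at registration time, while query parsers are registered as Suppliers because function_score must see every score function that plugins contribute. A sketch of how the calls compose; MyScoreFunctionParser is a hypothetical plugin class, and the final call is what SearchModule itself does further below:

    searchModule.registerFunctionScoreParser(new MyScoreFunctionParser()); // plugin code, eager construction
    searchModule.registerQueryParser(MatchQueryParser::new);               // built-in, dependency-free supplier
    // function_score defers construction so the mapper sees all registered
    // score functions once buildQueryParserRegistry() runs at configure time:
    searchModule.registerQueryParser(
            () -> new FunctionScoreQueryParser(new ScoreFunctionParserMapper(functionScoreParsers)));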
+ */ + private final List>> queryParsers = new ArrayList<>(); private final Set> fetchSubPhases = new HashSet<>(); private final Set> heuristicParsers = new HashSet<>(); private final Set> modelParsers = new HashSet<>(); + private final Settings settings; + private final NamedWriteableRegistry namedWriteableRegistry; + // pkg private so tests can mock Class searchServiceImpl = SearchService.class; + public SearchModule(Settings settings, NamedWriteableRegistry namedWriteableRegistry) { + this.settings = settings; + this.namedWriteableRegistry = namedWriteableRegistry; + + registerBuiltinFunctionScoreParsers(); + registerBuiltinQueryParsers(); + } + public void registerHighlighter(String key, Class clazz) { highlighters.registerExtension(key, clazz); } @@ -177,8 +274,21 @@ public class SearchModule extends AbstractModule { suggesters.registerExtension(key, suggester); } - public void registerFunctionScoreParser(Class parser) { - functionScoreParsers.add(parser); + /** + * Register a new ScoreFunctionParser. + */ + public void registerFunctionScoreParser(ScoreFunctionParser parser) { + for (String name: parser.getNames()) { + Object oldValue = functionScoreParsers.putIfAbsent(name, parser); + if (oldValue != null) { + throw new IllegalArgumentException("Function score parser [" + oldValue + "] already registered for name [" + name + "]"); + } + } + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, parser.getBuilderPrototype()); + } + + public void registerQueryParser(Supplier> parser) { + queryParsers.add(parser); } public void registerFetchSubPhase(Class subPhase) { @@ -212,8 +322,9 @@ public class SearchModule extends AbstractModule { configureAggs(); configureHighlighters(); configureSuggesters(); - configureFunctionScore(); + bind(IndicesQueriesRegistry.class).toInstance(buildQueryParserRegistry()); configureFetchSubPhase(); + configureShapes(); } protected void configureFetchSubPhase() { @@ -231,16 +342,23 @@ public class SearchModule extends AbstractModule { bind(InnerHitsFetchSubPhase.class).asEagerSingleton(); } - protected void configureSuggesters() { - suggesters.bind(binder()); + public IndicesQueriesRegistry buildQueryParserRegistry() { + Map> queryParsersMap = new HashMap<>(); + for (Supplier> parserSupplier : queryParsers) { + QueryParser parser = parserSupplier.get(); + for (String name: parser.names()) { + Object oldValue = queryParsersMap.putIfAbsent(name, parser); + if (oldValue != null) { + throw new IllegalArgumentException("Query parser [" + oldValue + "] already registered for name [" + name + "] while trying to register [" + parser + "]"); + } + } + namedWriteableRegistry.registerPrototype(QueryBuilder.class, parser.getBuilderPrototype()); + } + return new IndicesQueriesRegistry(settings, queryParsersMap); } - protected void configureFunctionScore() { - Multibinder parserMapBinder = Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); - for (Class clazz : functionScoreParsers) { - parserMapBinder.addBinding().to(clazz); - } - bind(ScoreFunctionParserMapper.class).asEagerSingleton(); + protected void configureSuggesters() { + suggesters.bind(binder()); } protected void configureHighlighters() { @@ -332,6 +450,87 @@ public class SearchModule extends AbstractModule { } } + private void configureShapes() { + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, 
CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); + } + } + + private void registerBuiltinFunctionScoreParsers() { + registerFunctionScoreParser(new ScriptScoreFunctionParser()); + registerFunctionScoreParser(new GaussDecayFunctionParser()); + registerFunctionScoreParser(new LinearDecayFunctionParser()); + registerFunctionScoreParser(new ExponentialDecayFunctionParser()); + registerFunctionScoreParser(new RandomScoreFunctionParser()); + registerFunctionScoreParser(new FieldValueFactorFunctionParser()); + //weight doesn't have its own parser, so every function supports it out of the box. + //Can be a single function too when not associated to any other function, which is why it needs to be registered manually here. + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, new WeightBuilder()); + } + + private void registerBuiltinQueryParsers() { + registerQueryParser(MatchQueryParser::new); + registerQueryParser(MultiMatchQueryParser::new); + registerQueryParser(NestedQueryParser::new); + registerQueryParser(HasChildQueryParser::new); + registerQueryParser(HasParentQueryParser::new); + registerQueryParser(DisMaxQueryParser::new); + registerQueryParser(IdsQueryParser::new); + registerQueryParser(MatchAllQueryParser::new); + registerQueryParser(QueryStringQueryParser::new); + registerQueryParser(BoostingQueryParser::new); + BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count", settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount()))); + registerQueryParser(BoolQueryParser::new); + registerQueryParser(TermQueryParser::new); + registerQueryParser(TermsQueryParser::new); + registerQueryParser(FuzzyQueryParser::new); + registerQueryParser(RegexpQueryParser::new); + registerQueryParser(RangeQueryParser::new); + registerQueryParser(PrefixQueryParser::new); + registerQueryParser(WildcardQueryParser::new); + registerQueryParser(ConstantScoreQueryParser::new); + registerQueryParser(SpanTermQueryParser::new); + registerQueryParser(SpanNotQueryParser::new); + registerQueryParser(SpanWithinQueryParser::new); + registerQueryParser(SpanContainingQueryParser::new); + registerQueryParser(FieldMaskingSpanQueryParser::new); + registerQueryParser(SpanFirstQueryParser::new); + registerQueryParser(SpanNearQueryParser::new); + registerQueryParser(SpanOrQueryParser::new); + registerQueryParser(MoreLikeThisQueryParser::new); + registerQueryParser(WrapperQueryParser::new); + registerQueryParser(IndicesQueryParser::new); + registerQueryParser(CommonTermsQueryParser::new); + registerQueryParser(SpanMultiTermQueryParser::new); + // This is delayed until configure time to give plugins a chance to register parsers + registerQueryParser(() -> new FunctionScoreQueryParser(new ScoreFunctionParserMapper(functionScoreParsers))); + registerQueryParser(SimpleQueryStringParser::new); + 
registerQueryParser(TemplateQueryParser::new); + registerQueryParser(TypeQueryParser::new); + registerQueryParser(ScriptQueryParser::new); + registerQueryParser(GeoDistanceQueryParser::new); + registerQueryParser(GeoDistanceRangeQueryParser::new); + registerQueryParser(GeoBoundingBoxQueryParser::new); + registerQueryParser(GeohashCellQuery.Parser::new); + registerQueryParser(GeoPolygonQueryParser::new); + registerQueryParser(ExistsQueryParser::new); + registerQueryParser(MatchNoneQueryParser::new); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { + registerQueryParser(GeoShapeQueryParser::new); + } + // EmptyQueryBuilder is not registered as query parser but used internally. + // We need to register it with the NamedWriteableRegistry in order to serialize it + namedWriteableRegistry.registerPrototype(QueryBuilder.class, EmptyQueryBuilder.PROTOTYPE); + } + static { // calcs InternalAvg.registerStreams(); @@ -393,5 +592,4 @@ public class SearchModule extends AbstractModule { BucketSelectorPipelineAggregator.registerStreams(); SerialDiffPipelineAggregator.registerStreams(); } - } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index 473282a5777..84d00cb54bd 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -121,7 +121,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; */ public class SearchService extends AbstractLifecycleComponent implements IndexEventListener { - public static final String NORMS_LOADING_KEY = "index.norms.loading"; + public static final Setting INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", Loading.LAZY.toString(), (s) -> Loading.parse(s, Loading.LAZY), false, Setting.Scope.INDEX); public static final String DEFAULT_KEEPALIVE_KEY = "search.default_keep_alive"; public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval"; @@ -962,7 +962,7 @@ public class SearchService extends AbstractLifecycleComponent imp } @Override public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) { - final Loading defaultLoading = Loading.parse(indexShard.getIndexSettings().getSettings().get(NORMS_LOADING_KEY), Loading.LAZY); + final Loading defaultLoading = indexShard.indexSettings().getValue(INDEX_NORMS_LOADING_SETTING); final MapperService mapperService = indexShard.mapperService(); final ObjectSet warmUp = new ObjectHashSet<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java index 6473b5ae7f2..60302f2da42 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java @@ -62,7 +62,8 @@ public class GeoHashGridParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, context).build(); + ValuesSourceParser vsParser = ValuesSourceParser.geoPoint(aggregationName, InternalGeoHashGrid.TYPE, 
context) + .build(); int precision = GeoHashGridParams.DEFAULT_PRECISION; int requiredSize = GeoHashGridParams.DEFAULT_MAX_NUM_CELLS; @@ -131,6 +132,7 @@ public class GeoHashGridParser implements Aggregator.Parser { final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize, Collections. emptyList(), pipelineAggregators, metaData); return new NonCollectingAggregator(name, aggregationContext, parent, pipelineAggregators, metaData) { + @Override public InternalAggregation buildEmptyAggregation() { return aggregation; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java index 694abf26e66..52d77e1594c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -78,7 +79,7 @@ public class DateHistogramParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalDateHistogram.TYPE, context) .targetValueType(ValueType.DATE) .formattable(true) .timezoneAware(true) @@ -190,7 +191,7 @@ public class DateHistogramParser implements Aggregator.Parser { .timeZone(vsParser.input().timezone()) .offset(offset).build(); - ValuesSourceConfig config = vsParser.config(); + ValuesSourceConfig config = vsParser.config(); return new HistogramAggregator.Factory(aggregationName, config, rounding, order, keyed, minDocCount, extendedBounds, new InternalDateHistogram.Factory()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java index c738251d0e0..31ee6681d4e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java @@ -25,6 +25,7 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.aggregations.support.format.ValueParser; import org.elasticsearch.search.internal.SearchContext; @@ -46,7 +47,7 @@ public class HistogramParser implements Aggregator.Parser { @Override public AggregatorFactory 
parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.numeric(aggregationName, InternalHistogram.TYPE, context) .targetValueType(ValueType.NUMERIC) .formattable(true) .build(); @@ -127,7 +128,7 @@ public class HistogramParser implements Aggregator.Parser { Rounding rounding = new Rounding.Interval(interval); if (offset != 0) { - rounding = new Rounding.OffsetRounding((Rounding.Interval) rounding, offset); + rounding = new Rounding.OffsetRounding(rounding, offset); } if (extendedBounds != null) { @@ -136,7 +137,7 @@ public class HistogramParser implements Aggregator.Parser { } return new HistogramAggregator.Factory(aggregationName, vsParser.config(), rounding, order, keyed, minDocCount, extendedBounds, - new InternalHistogram.Factory()); + new InternalHistogram.Factory<>()); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java index 1ae7341f611..38e15e216c2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregator.java @@ -81,9 +81,9 @@ public class MissingAggregator extends SingleBucketAggregator { return new InternalMissing(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); } - public static class Factory extends ValuesSourceAggregatorFactory { + public static class Factory extends ValuesSourceAggregatorFactory { - public Factory(String name, ValuesSourceConfig valueSourceConfig) { + public Factory(String name, ValuesSourceConfig valueSourceConfig) { super(name, InternalMissing.TYPE.name(), valueSourceConfig); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java index 6ecdc129dd0..4210e020d8c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -39,8 +40,7 @@ public class MissingParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalMissing.TYPE, context) .scriptable(false) .build(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 8cb980954cb..4541aa9d142 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -203,7 +203,8 @@ public class SamplerAggregator extends SingleBucketAggregator { private int maxDocsPerValue; private String executionHint; - public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, int maxDocsPerValue) { + public DiversifiedFactory(String name, int shardSize, String executionHint, ValuesSourceConfig vsConfig, + int maxDocsPerValue) { super(name, InternalSampler.TYPE.name(), vsConfig); this.shardSize = shardSize; this.maxDocsPerValue = maxDocsPerValue; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java index 498a7cb5c66..d51f43657f7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -55,10 +56,10 @@ public class SamplerParser implements Aggregator.Parser { String executionHint = null; int shardSize = DEFAULT_SHARD_SAMPLE_SIZE; int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT; - ValuesSourceParser vsParser = null; boolean diversityChoiceMade = false; - vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true).formattable(false).build(); + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalSampler.TYPE, context).scriptable(true) + .formattable(false).build(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { @@ -88,7 +89,7 @@ public class SamplerParser implements Aggregator.Parser { } } - ValuesSourceConfig vsConfig = vsParser.config(); + ValuesSourceConfig vsConfig = vsParser.config(); if (vsConfig.valid()) { return new SamplerAggregator.DiversifiedFactory(aggregationName, shardSize, executionHint, vsConfig, maxDocsPerValue); } else { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 399e85728af..9b66fe01b67 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket.significant; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import 
org.elasticsearch.ElasticsearchException; @@ -80,8 +79,6 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, Map metaData) throws IOException { - ValuesSource.Bytes.WithOrdinals valueSourceWithOrdinals = (ValuesSource.Bytes.WithOrdinals) valuesSource; - IndexSearcher indexSearcher = aggregationContext.searchContext().searcher(); final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); return new GlobalOrdinalsSignificantTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, @@ -98,9 +95,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac List pipelineAggregators, Map metaData) throws IOException { final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(); return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories, - (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, - aggregationContext, - parent, termsAggregatorFactory, pipelineAggregators, metaData); + (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, bucketCountThresholds, filter, aggregationContext, parent, + termsAggregatorFactory, pipelineAggregators, metaData); } }; @@ -143,7 +139,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac return new TermsAggregator.BucketCountThresholds(bucketCountThresholds); } - public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, + public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig valueSourceConfig, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, String executionHint, Query filter, SignificanceHeuristic significanceHeuristic) { super(name, SignificantStringTerms.TYPE.name(), valueSourceConfig); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java index 28e0fb5a812..b4b89c29c33 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; +import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -53,7 +54,7 @@ public class SignificantTermsParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) 
throws IOException { SignificantTermsParametersParser aggParser = new SignificantTermsParametersParser(significanceHeuristicParserMapper); - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, SignificantStringTerms.TYPE, context) .scriptable(false) .formattable(true) .build(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java index 891526c33c1..ecd9d3bc693 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractTermsParametersParser.java @@ -36,13 +36,13 @@ public abstract class AbstractTermsParametersParser { public static final ParseField SHARD_MIN_DOC_COUNT_FIELD_NAME = new ParseField("shard_min_doc_count"); public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size"); public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error"); - + //These are the results of the parsing. private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(); private String executionHint = null; - + private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST; @@ -59,12 +59,12 @@ public abstract class AbstractTermsParametersParser { public IncludeExclude getIncludeExclude() { return includeExclude; } - + public SubAggCollectionMode getCollectionMode() { return collectMode; } - public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser vsParser, IncludeExclude.Parser incExcParser) throws IOException { + public void parse(String aggregationName, XContentParser parser, SearchContext context, ValuesSourceParser vsParser, IncludeExclude.Parser incExcParser) throws IOException { bucketCountThresholds = getDefaultBucketCountThresholds(); XContentParser.Token token; String currentFieldName = null; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 270dc009af2..04f7adf27b7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -165,7 +165,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory config, Terms.Order order, TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, String executionHint, SubAggCollectionMode executionMode, boolean showTermDocCountError) { super(name, StringTerms.TYPE.name(), config); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java index 478309d1bc0..a7b60e930ff 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java @@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils; import 
org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; import org.elasticsearch.search.aggregations.bucket.terms.TermsParametersParser.OrderElement; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; +import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -45,7 +46,8 @@ public class TermsParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { TermsParametersParser aggParser = new TermsParametersParser(); - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true).formattable(true).build(); + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, StringTerms.TYPE, context).scriptable(true) + .formattable(true).build(); IncludeExclude.Parser incExcParser = new IncludeExclude.Parser(); aggParser.parse(aggregationName, parser, context, vsParser, incExcParser); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java index e6755486225..81933144a5a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java @@ -62,6 +62,7 @@ public abstract class ValuesSourceMetricsAggregationBuilder config, long precisionThreshold) { super(name, InternalCardinality.TYPE.name(), config); this.precisionThreshold = precisionThreshold; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java index 68339457fe7..31552326450 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceParser; import org.elasticsearch.search.internal.SearchContext; @@ -43,7 +44,7 @@ public class CardinalityParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String name, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build(); + ValuesSourceParser vsParser = ValuesSourceParser.any(name, InternalCardinality.TYPE, context).formattable(false).build(); long precisionThreshold = -1; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java index 764f6ce9384..0a9ea4a9072 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java @@ -40,7 +40,7 @@ public class ValueCountParser implements Aggregator.Parser { @Override public AggregatorFactory parse(String aggregationName, XContentParser parser, SearchContext context) throws IOException { - ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context) + ValuesSourceParser vsParser = ValuesSourceParser.any(aggregationName, InternalValueCount.TYPE, context) .build(); XContentParser.Token token; @@ -54,6 +54,6 @@ public class ValueCountParser implements Aggregator.Parser { } } - return new ValueCountAggregator.Factory(aggregationName, vsParser.config()); + return new ValueCountAggregator.Factory<>(aggregationName, vsParser.config()); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index b03bc8d6833..d9fe3ad66c0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -53,6 +53,9 @@ import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; import java.io.IOException; +/** + * How to load values for an aggregation. + */ public abstract class ValuesSource { /** @@ -528,6 +531,7 @@ public abstract class ValuesSource { return indexFieldData.load(context).getBytesValues(); } + @Override public org.elasticsearch.index.fielddata.MultiGeoPointValues geoPointValues(LeafReaderContext context) { return indexFieldData.load(context).getGeoPointValues(); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java index d0eaec2d8bc..3f56162a2f2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java @@ -78,19 +78,20 @@ public abstract class ValuesSourceAggregatorFactory ext boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException; + @SuppressWarnings("unchecked") // Safe because we check the types with isAssignableFrom private void resolveValuesSourceConfigFromAncestors(String aggName, AggregatorFactory parent, Class requiredValuesSourceType) { - ValuesSourceConfig config; + ValuesSourceConfig config; while (parent != null) { if (parent instanceof ValuesSourceAggregatorFactory) { - config = ((ValuesSourceAggregatorFactory) parent).config; + config = ((ValuesSourceAggregatorFactory) parent).config; if (config != null && config.valid()) { if (requiredValuesSourceType == null || requiredValuesSourceType.isAssignableFrom(config.valueSourceType)) { ValueFormat format = config.format; - this.config = config; + this.config = (ValuesSourceConfig) config; // if the user explicitly defined a format pattern, we'll do our best to keep it even when we inherit the // value source form one of the ancestor aggregations if (this.config.formatPattern != null && format != null && format instanceof ValueFormat.Patternable) { - this.config.format = ((ValueFormat.Patternable) format).create(this.config.formatPattern); + 
this.config.format = ((ValueFormat.Patternable) format).create(this.config.formatPattern); } return; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java index fced5fdc913..7c260619173 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java @@ -48,13 +48,16 @@ import java.util.HashMap; import java.util.Map; /** - * + * Parses a description of where to load the values sent by a user into a + * ValuesSourceConfig which can be used to work with the values in various ways, + * one of which is to create an actual ValuesSource (done with the help of + * AggregationContext). */ public class ValuesSourceParser { static final ParseField TIME_ZONE = new ParseField("time_zone"); - public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) { + public static Builder<ValuesSource> any(String aggName, InternalAggregation.Type aggType, SearchContext context) { return new Builder<>(aggName, aggType, context, ValuesSource.class); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 3b7848d7d2f..01ab53f6d63 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.engine.Engine; @@ -78,20 +79,6 @@ import java.util.Map; * */ public class DefaultSearchContext extends SearchContext { - /** - * Index setting describing the maximum value of from + size on a query. - */ - public static final String MAX_RESULT_WINDOW = "index.max_result_window"; - public static class Defaults { - /** - * Default maximum value of from + size on a query. 10,000 was chosen as - * a conservative default as it is sure to not cause trouble. Users can - * certainly profile their cluster and decide to set it to 100,000 - * safely. 1,000,000 is probably way to high for any cluster to set - * safely. - */ - public static final int MAX_RESULT_WINDOW = 10000; - } private final long id; private final ShardSearchRequest request; @@ -202,13 +189,13 @@ public class DefaultSearchContext extends SearchContext { long resultWindow = from + size; // We need settingsService's view of the settings because its dynamic. // indexService's isn't. - int maxResultWindow = indexService.getIndexSettings().getSettings().getAsInt(MAX_RESULT_WINDOW, Defaults.MAX_RESULT_WINDOW); + int maxResultWindow = indexService.getIndexSettings().getMaxResultWindow(); if (resultWindow > maxResultWindow) { throw new QueryPhaseExecutionException(this, "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow + "]. See the scroll api for a more efficient way to request large data sets. 
" - + "This limit can be set by changing the [" + DefaultSearchContext.MAX_RESULT_WINDOW + + "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level parameter."); } } diff --git a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java index 8da14d23d96..d0c006eb5dc 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java +++ b/core/src/main/java/org/elasticsearch/search/profile/CollectorResult.java @@ -36,7 +36,7 @@ import java.util.Locale; * Collectors used in the search. Children CollectorResult's may be * embedded inside of a parent CollectorResult */ -public class CollectorResult implements ToXContent, Writeable { +public class CollectorResult implements ToXContent, Writeable { public static final String REASON_SEARCH_COUNT = "search_count"; public static final String REASON_SEARCH_TOP_HITS = "search_top_hits"; @@ -125,7 +125,7 @@ public class CollectorResult implements ToXContent, Writeable { builder = builder.startObject() .field(NAME.getPreferredName(), getName()) .field(REASON.getPreferredName(), getReason()) - .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", (double) (getTime() / 1000000.0))); + .field(TIME.getPreferredName(), String.format(Locale.US, "%.10gms", getTime() / 1000000.0)); if (!children.isEmpty()) { builder = builder.startArray(CHILDREN.getPreferredName()); @@ -150,7 +150,7 @@ public class CollectorResult implements ToXContent, Writeable { } @Override - public Object readFrom(StreamInput in) throws IOException { + public CollectorResult readFrom(StreamInput in) throws IOException { return new CollectorResult(in); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 55c10aa298e..d8d4f2656c8 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.tasks; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; @@ -27,6 +28,7 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.action.support.nodes.BaseNodesRequest; @@ -67,8 +69,10 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReferenceArray; +import static org.elasticsearch.action.support.PlainActionFuture.newFuture; import 
static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.not; @@ -418,7 +422,17 @@ public class TransportTasksActionTests extends ESTestCase { return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request")); } - private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws InterruptedException { + private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws InterruptedException { + PlainActionFuture future = newFuture(); + startBlockingTestNodesAction(checkLatch, request, future); + return future; + } + + private Task startBlockingTestNodesAction(CountDownLatch checkLatch, ActionListener listener) throws InterruptedException { + return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request"), listener); + } + + private Task startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request, ActionListener listener) throws InterruptedException { CountDownLatch actionLatch = new CountDownLatch(nodesCount); TestNodesAction[] actions = new TestNodesAction[nodesCount]; for (int i = 0; i < testNodes.length; i++) { @@ -442,17 +456,31 @@ public class TransportTasksActionTests extends ESTestCase { for (TestNode node : testNodes) { assertEquals(0, node.transportService.getTaskManager().getTasks().size()); } - ActionFuture future = actions[0].execute(request); + Task task = actions[0].execute(request, listener); logger.info("Awaiting for all actions to start"); actionLatch.await(); logger.info("Done waiting for all actions to start"); - return future; + return task; } public void testRunningTasksCount() throws Exception { connectNodes(testNodes); CountDownLatch checkLatch = new CountDownLatch(1); - ActionFuture future = startBlockingTestNodesAction(checkLatch); + CountDownLatch responseLatch = new CountDownLatch(1); + final AtomicReference responseReference = new AtomicReference<>(); + Task mainTask = startBlockingTestNodesAction(checkLatch, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + responseLatch.countDown(); + } + + @Override + public void onFailure(Throwable e) { + logger.warn("Couldn't get list of tasks", e); + responseLatch.countDown(); + } + }); // Check task counts using taskManager Map localTasks = testNodes[0].transportService.getTaskManager().getTasks(); @@ -505,9 +533,17 @@ public class TransportTasksActionTests extends ESTestCase { assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription()); } + // Make sure that the main task on coordinating node is the task that was returned to us by execute() + listTasksRequest.actions("testAction"); // only pick the main task + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(1, response.getTasks().size()); + assertEquals(mainTask.getId(), response.getTasks().get(0).getId()); + // Release all tasks and wait for response checkLatch.countDown(); - NodesResponse responses = future.get(); + assertTrue(responseLatch.await(10, TimeUnit.SECONDS)); + + NodesResponse responses = responseReference.get(); assertEquals(0, responses.failureCount()); // Make sure that we don't have any lingering tasks diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 
a64f67a8cd5..56eaad11f94 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -130,7 +130,7 @@ public class ClusterStatsIT extends ESIntegTestCase { public void testValuesSmokeScreen() throws IOException { internalCluster().ensureAtMostNumDataNodes(5); internalCluster().ensureAtLeastNumDataNodes(1); - assertAcked(prepareCreate("test1").setSettings(settingsBuilder().put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0).build())); + assertAcked(prepareCreate("test1").setSettings(settingsBuilder().put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0).build())); index("test1", "type", "1", "f", "f"); /* * Ensure at least one shard is allocated otherwise the FS stats might diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 1f0bcf0c17c..292705305cc 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -52,19 +52,13 @@ import static org.hamcrest.core.IsNull.notNullValue; @ClusterScope(scope = Scope.TEST) public class CreateIndexIT extends ESIntegTestCase { - public void testCreationDateGiven() { - prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 4l)).get(); - ClusterStateResponse response = client().admin().cluster().prepareState().get(); - ClusterState state = response.getState(); - assertThat(state, notNullValue()); - MetaData metadata = state.getMetaData(); - assertThat(metadata, notNullValue()); - ImmutableOpenMap indices = metadata.getIndices(); - assertThat(indices, notNullValue()); - assertThat(indices.size(), equalTo(1)); - IndexMetaData index = indices.get("test"); - assertThat(index, notNullValue()); - assertThat(index.getCreationDate(), equalTo(4l)); + public void testCreationDateGivenFails() { + try { + prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 4l)).get(); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("unknown setting [index.creation_date]", ex.getMessage()); + } } public void testCreationDateGenerated() { @@ -112,41 +106,27 @@ public class CreateIndexIT extends ESIntegTestCase { } public void testInvalidShardCountSettings() throws Exception { + int value = randomIntBetween(-10, 0); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(-10, 0)) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, value) .build()) .get(); fail("should have thrown an exception about the primary shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_shards] must be >= 1", e.getMessage()); } - + value = randomIntBetween(-10, -1); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, value) .build()) .get(); fail("should have thrown an exception about the replica shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains 
error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage()); } - try { - prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(-10, 0)) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1)) - .build()) - .get(); - fail("should have thrown an exception about the shard count"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); - } } public void testCreateIndexWithBlocks() { @@ -164,39 +144,38 @@ public class CreateIndexIT extends ESIntegTestCase { disableIndexBlock("test", IndexMetaData.SETTING_BLOCKS_METADATA); } + public void testUnknownSettingFails() { + try { + prepareCreate("test").setSettings(Settings.builder() + .put("index.unknown.value", "this must fail") + .build()) + .get(); + fail("should have thrown an exception about the shard count"); + } catch (IllegalArgumentException e) { + assertEquals("unknown setting [index.unknown.value]", e.getMessage()); + } + } + public void testInvalidShardCountSettingsWithoutPrefix() throws Exception { + int value = randomIntBetween(-10, 0); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, 0)) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), value) .build()) .get(); fail("should have thrown an exception about the shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_shards] must be >= 1", e.getMessage()); } + value = randomIntBetween(-10, -1); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, -1)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), value) .build()) .get(); fail("should have thrown an exception about the shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); - } - try { - prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, 0)) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, -1)) - .build()) - .get(); - fail("should have thrown an exception about the shard count"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); - 
assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage()); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java index a2b4d97235d..36aad4fb36e 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java @@ -24,15 +24,23 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.indices.IndexClosedException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; +import java.util.Collection; import java.util.List; import static org.hamcrest.Matchers.is; public class IndicesSegmentsRequestTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + @Before public void setupIndex() { Settings settings = Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 3a81f0ba0d9..d7f85ec5650 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -35,10 +35,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.MockIndexEventListener; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSIndexStore; +import org.elasticsearch.test.transport.MockTransportService; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -57,6 +62,12 @@ import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class IndicesShardStoreRequestIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList( MockFSIndexStore.TestPlugin.class); + } + public void testEmpty() { ensureGreen(); IndicesShardStoresResponse rsp = client().admin().indices().prepareShardStores().get(); @@ -148,7 +159,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { internalCluster().ensureAtLeastNumDataNodes(2); assertAcked(prepareCreate(index).setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "5") - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) )); indexRandomData(index); ensureGreen(index); diff --git 
a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 09079be6ee9..54bf0115d87 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -93,7 +93,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase { null, new HashSet<>(), null, - null); + null, null); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY)); final List throwables = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 81eb832be9a..66fb8aa3f21 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -110,7 +110,7 @@ public class BulkRequestTests extends ESTestCase { public void testBulkAddIterable() { BulkRequest bulkRequest = Requests.bulkRequest(); - List requests = new ArrayList<>(); + List> requests = new ArrayList<>(); requests.add(new IndexRequest("test", "test", "id").source("field", "value")); requests.add(new UpdateRequest("test", "test", "id").doc("field", "value")); requests.add(new DeleteRequest("test", "test", "id")); diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index ee0ceef1721..7301d37488b 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -19,10 +19,11 @@ package org.elasticsearch.action.search; +import java.io.IOException; + import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -34,15 +35,14 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; -import java.io.IOException; -import java.util.Collections; +import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class MultiSearchRequestTests extends ESTestCase { public void testSimpleAdd() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch1.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, 
registry, ParseFieldMatcher.EMPTY); @@ -69,7 +69,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd2() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch2.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -88,7 +88,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd3() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch3.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -108,7 +108,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd4() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch4.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -130,7 +130,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd5() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch5.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), true, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 6a14989be1a..fed4e1d6384 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -442,7 +442,7 @@ public class 
TransportActionFilterChainTests extends ESTestCase { void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain); } - public static class TestRequest extends ActionRequest { + public static class TestRequest extends ActionRequest { @Override public ActionRequestValidationException validate() { return null; diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 90d91dfb197..d94049c036f 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -410,7 +410,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { // simulate node failure totalShards += map.get(entry.getKey()).size(); totalFailedShards += map.get(entry.getKey()).size(); - transport.handleResponse(requestId, new Exception()); + transport.handleRemoteError(requestId, new Exception()); } else { List shards = map.get(entry.getKey()); List shardResults = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 980558c2716..c9a7d9bd2d2 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -297,14 +297,14 @@ public class TransportMasterNodeActionTests extends ESTestCase { assertThat(capturedRequest.action, equalTo("testAction")); if (failsWithConnectTransportException) { - transport.handleResponse(capturedRequest.requestId, new ConnectTransportException(remoteNode, "Fake error")); + transport.handleRemoteError(capturedRequest.requestId, new ConnectTransportException(remoteNode, "Fake error")); assertFalse(listener.isDone()); clusterService.setState(ClusterStateCreationUtils.state(localNode, localNode, allNodes)); assertTrue(listener.isDone()); listener.get(); } else { Throwable t = new Throwable(); - transport.handleResponse(capturedRequest.requestId, t); + transport.handleRemoteError(capturedRequest.requestId, t); assertTrue(listener.isDone()); try { listener.get(); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index d1abe8653f0..c3084b93eb8 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.ReplicationResponse; +import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -100,7 +101,7 @@ public class BroadcastReplicationTests extends ESTestCase { clusterService.setState(state(index, 
randomBoolean(), randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); + Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { if (randomBoolean()) { shardRequests.v2().onFailure(new NoShardAvailableActionException(shardRequests.v1())); @@ -119,7 +120,7 @@ public class BroadcastReplicationTests extends ESTestCase { clusterService.setState(state(index, randomBoolean(), ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); + Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { ReplicationResponse replicationResponse = new ReplicationResponse(); replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, new ReplicationResponse.ShardInfo.Failure[0])); @@ -134,7 +135,7 @@ public class BroadcastReplicationTests extends ESTestCase { int numShards = randomInt(3); clusterService.setState(stateWithAssignedPrimariesAndOneReplica(index, numShards)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); + Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); int succeeded = 0; int failed = 0; for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { @@ -164,7 +165,7 @@ public class BroadcastReplicationTests extends ESTestCase { public void testNoShards() throws InterruptedException, ExecutionException, IOException { clusterService.setState(stateWithNoShard()); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - BroadcastResponse response = executeAndAssertImmediateResponse(broadcastReplicationAction, new BroadcastRequest()); + BroadcastResponse response = executeAndAssertImmediateResponse(broadcastReplicationAction, new DummyBroadcastRequest()); assertBroadcastResponse(0, 0, 0, response, null); } @@ -174,16 +175,19 @@ public class BroadcastReplicationTests extends ESTestCase { ClusterState clusterState = state(index, randomBoolean(), randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - List shards = broadcastReplicationAction.shards(new BroadcastRequest().indices(shardId.index().name()), clusterState); + List shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.index().name()), clusterState); assertThat(shards.size(), equalTo(1)); assertThat(shards.get(0), equalTo(shardId)); } - private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { + private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); - public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super("test-broadcast-replication-action", BroadcastRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); + public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { + super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService, + actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } @Override @@ -192,17 +196,18 @@ public class BroadcastReplicationTests extends ESTestCase { } @Override - protected ReplicationRequest newShardRequest(BroadcastRequest request, ShardId shardId) { - return new ReplicationRequest().setShardId(shardId); + protected BasicReplicationRequest newShardRequest(DummyBroadcastRequest request, ShardId shardId) { + return new BasicReplicationRequest().setShardId(shardId); } @Override - protected BroadcastResponse newResponse(int successfulShards, int failedShards, int totalNumCopies, List shardFailures) { + protected BroadcastResponse newResponse(int successfulShards, int failedShards, int totalNumCopies, + List shardFailures) { return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } @Override - protected void shardExecute(BroadcastRequest request, ShardId shardId, ActionListener shardActionListener) { + protected void shardExecute(DummyBroadcastRequest request, ShardId shardId, ActionListener shardActionListener) { capturedShardRequests.add(new Tuple<>(shardId, shardActionListener)); } } @@ -216,7 +221,7 @@ public class BroadcastReplicationTests extends ESTestCase { return flushResponse; } - public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, BroadcastRequest request) throws InterruptedException, ExecutionException { + public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) throws InterruptedException, ExecutionException { return (BroadcastResponse) broadcastAction.execute(request).actionGet("5s"); } @@ -228,4 +233,8 @@ public class BroadcastReplicationTests extends ESTestCase { 
assertThat(response.getShardFailures()[0].getCause().getCause(), instanceOf(exceptionClass)); } } + + public static class DummyBroadcastRequest extends BroadcastRequest { + + } } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 0a390ea3706..9fdbdf1cb38 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -488,7 +489,7 @@ public class TransportReplicationActionTests extends ESTestCase { TransportReplicationAction.ReplicationPhase replicationPhase = action.new ReplicationPhase(request, new Response(), - request.shardId(), createTransportChannel(listener), reference, null); + request.shardId(), createTransportChannel(listener), reference); assertThat(replicationPhase.totalShards(), equalTo(totalShards)); assertThat(replicationPhase.pending(), equalTo(assignedReplicas)); @@ -545,7 +546,7 @@ public class TransportReplicationActionTests extends ESTestCase { t = new IndexShardNotStartedException(shardId, IndexShardState.RECOVERING); } logger.debug("--> simulating failure on {} with [{}]", capturedRequest.node, t.getClass().getSimpleName()); - transport.handleResponse(capturedRequest.requestId, t); + transport.handleRemoteError(capturedRequest.requestId, t); if (criticalFailure) { CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear(); assertEquals(1, shardFailedRequests.length); @@ -557,7 +558,23 @@ public class TransportReplicationActionTests extends ESTestCase { // the shard the request was sent to and the shard to be failed should be the same assertEquals(shardRoutingEntry.getShardRouting(), routing); failures.add(shardFailedRequest); - transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE); + if (randomBoolean()) { + // simulate master left and test that the shard failure is retried + int numberOfRetries = randomIntBetween(1, 4); + CapturingTransport.CapturedRequest currentRequest = shardFailedRequest; + for (int retryNumber = 0; retryNumber < numberOfRetries; retryNumber++) { + // force a new cluster state to simulate a new master having been elected + clusterService.setState(ClusterState.builder(clusterService.state())); + transport.handleRemoteError(currentRequest.requestId, new NotMasterException("shard-failed-test")); + CapturingTransport.CapturedRequest[] retryRequests = transport.getCapturedRequestsAndClear(); + assertEquals(1, retryRequests.length); + currentRequest = retryRequests[0]; + } + // now simulate that the last retry succeeded + transport.handleResponse(currentRequest.requestId, TransportResponse.Empty.INSTANCE); + } else { + transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE); + } } } else { successful++; @@ -583,7 +600,7 @@ public class TransportReplicationActionTests 
extends ESTestCase { assertIndexShardCounter(1); } - public void testCounterOnPrimary() throws InterruptedException, ExecutionException, IOException { + public void testCounterOnPrimary() throws Exception { final String index = "test"; final ShardId shardId = new ShardId(index, 0); // no replica, we only want to test on primary @@ -611,9 +628,7 @@ public class TransportReplicationActionTests extends ESTestCase { t.start(); // shard operation should be ongoing, so the counter is at 2 // we have to wait here because increment happens in thread - awaitBusy(() -> count.get() == 2); - - assertIndexShardCounter(2); + assertBusy(() -> assertIndexShardCounter(2)); assertThat(transport.capturedRequests().length, equalTo(0)); ((ActionWithDelay) action).countDownLatch.countDown(); t.join(); @@ -647,7 +662,7 @@ public class TransportReplicationActionTests extends ESTestCase { CapturingTransport.CapturedRequest[] replicationRequests = transport.getCapturedRequestsAndClear(); assertThat(replicationRequests.length, equalTo(1)); // try with failure response - transport.handleResponse(replicationRequests[0].requestId, new CorruptIndexException("simulated", (String) null)); + transport.handleRemoteError(replicationRequests[0].requestId, new CorruptIndexException("simulated", (String) null)); CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear(); assertEquals(1, shardFailedRequests.length); transport.handleResponse(shardFailedRequests[0].requestId, TransportResponse.Empty.INSTANCE); @@ -664,7 +679,7 @@ public class TransportReplicationActionTests extends ESTestCase { @Override public void run() { try { - replicaOperationTransportHandler.messageReceived(new Request(), createTransportChannel(new PlainActionFuture<>())); + replicaOperationTransportHandler.messageReceived(new Request().setShardId(shardId), createTransportChannel(new PlainActionFuture<>())); } catch (Exception e) { } } @@ -672,7 +687,7 @@ public class TransportReplicationActionTests extends ESTestCase { t.start(); // shard operation should be ongoing, so the counter is at 2 // we have to wait here because increment happens in thread - awaitBusy(() -> count.get() == 2); + assertBusy(() -> assertIndexShardCounter(2)); ((ActionWithDelay) action).countDownLatch.countDown(); t.join(); // operation should have finished and counter decreased because no outstanding replica requests diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index fce431238dd..344846c363e 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -198,7 +198,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { long requestId = transport.capturedRequests()[0].requestId; transport.clear(); // this should not trigger retry or anything and the listener should report exception immediately - transport.handleResponse(requestId, new TransportException("a generic transport exception", new Exception("generic test exception"))); + transport.handleRemoteError(requestId, new TransportException("a generic transport exception", new Exception("generic test exception"))); try { // result should return immediately @@ -240,7 +240,7 @@ 
public class TransportInstanceSingleOperationActionTests extends ESTestCase { long requestId = transport.capturedRequests()[0].requestId; transport.clear(); DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); - transport.handleResponse(requestId, new ConnectTransportException(node, "test exception")); + transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // trigger cluster state observer clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); assertThat(transport.capturedRequests().length, equalTo(1)); @@ -258,7 +258,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { long requestId = transport.capturedRequests()[0].requestId; transport.clear(); DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); - transport.handleResponse(requestId, new ConnectTransportException(node, "test exception")); + transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // wait until the timeout was triggered and we actually tried to send for the second time assertBusy(new Runnable() { @@ -270,7 +270,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { // let it fail the second time too requestId = transport.capturedRequests()[0].requestId; - transport.handleResponse(requestId, new ConnectTransportException(node, "test exception")); + transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); try { // result should return immediately assertTrue(listener.isDone()); @@ -313,4 +313,4 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { } } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index d1217ea6f31..35e33b378bf 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -411,7 +411,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { protected TermVectorsRequestBuilder getRequestForConfig(TestConfig config) { return client().prepareTermVectors(randomBoolean() ? 
config.doc.index : config.doc.alias, config.doc.type, config.doc.id).setPayloads(config.requestPayloads) .setOffsets(config.requestOffsets).setPositions(config.requestPositions).setFieldStatistics(true).setTermStatistics(true) - .setSelectedFields(config.selectedFields); + .setSelectedFields(config.selectedFields).setRealtime(false); } protected Fields getTermVectorsFromLucene(DirectoryReader directoryReader, TestDoc doc) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index 1cbe05da6a0..867d054a712 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -443,7 +443,7 @@ public class IndexAliasesIT extends ESIntegTestCase { public void testWaitForAliasCreationSingleShard() throws Exception { logger.info("--> creating index [test]"); - assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.numberOfReplicas", 0).put("index.numberOfShards", 1))).get()); + assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 1))).get()); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 1518f2ebc3a..8a18b728200 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -51,12 +51,14 @@ import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; @@ -73,6 +75,7 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -91,6 +94,12 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // TODO: test for proper exception on unsupported indexes (maybe via separate test?) // We have a 0.20.6.zip etc for this. 
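// Aside on the IndexAliasesIT fix above: Settings is an untyped string-to-string map, so the
// old camelCase keys ("index.numberOfReplicas", "index.numberOfShards") were never rejected;
// they were simply keys nobody read, and the index silently kept its defaults. A minimal
// sketch of that failure mode (hypothetical test, not part of this patch):
public void testCamelCaseSettingKeyIsSilentlyIgnored() {
    Settings wrong = Settings.settingsBuilder().put("index.numberOfReplicas", 0).build();
    Settings right = Settings.settingsBuilder().put("index.number_of_replicas", 0).build();
    // the misspelled key leaves the canonical key unset, so the default of 1 replica still applies
    assertEquals(1, (int) wrong.getAsInt("index.number_of_replicas", 1));
    assertEquals(0, (int) right.getAsInt("index.number_of_replicas", 1));
}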
+ + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); + } + List<String> indexes; List<String> unsupportedIndexes; static Path singleDataPath; diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 31487614c99..ac2845c86ab 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -139,8 +139,8 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { public void testClusterInfoServiceCollectsInformation() throws Exception { internalCluster().startNodesAsync(2).get(); assertAcked(prepareCreate("test").setSettings(settingsBuilder() - .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE).build())); + .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE).build())); ensureGreen("test"); InternalTestCluster internalTestCluster = internalCluster(); // Get the cluster info service on the master node diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 0e9f6cd9e04..7cfa0eeaaa7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -31,15 +31,13 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.cluster.settings.DynamicSettings; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.index.settings.IndexDynamicSettings; public class ClusterModuleTests extends ModuleTestCase { @@ -93,18 +91,19 @@ public class ClusterModuleTests extends ModuleTestCase { } public void testRegisterIndexDynamicSettingDuplicate() { - ClusterModule module = new ClusterModule(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, new SettingsFilter(Settings.EMPTY)); try { - module.registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); + module.registerSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING); } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE + "] twice"); + assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice"); } } public void testRegisterIndexDynamicSetting() { - ClusterModule module = new ClusterModule(Settings.EMPTY); -
module.registerIndexDynamicSetting("foo.bar", Validator.EMPTY); - assertInstanceBindingWithAnnotation(module, DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), IndexDynamicSettings.class); + final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX)); + assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("foo.bar")); } public void testRegisterAllocationDeciderDuplicate() { diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index f69c9149a9c..c59405f2345 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -20,41 +20,81 @@ package org.elasticsearch.cluster.action.shard; import org.apache.lucene.index.CorruptIndexException; +import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.ReceiveTimeoutTransportException; +import org.elasticsearch.transport.NodeDisconnectedException; +import org.elasticsearch.transport.NodeNotConnectedException; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.LongConsumer; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithStartedPrimary; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.is; public class ShardStateActionTests extends ESTestCase { private static ThreadPool THREAD_POOL; - private ShardStateAction shardStateAction; + private TestShardStateAction shardStateAction; private CapturingTransport transport; private 
TransportService transportService; private TestClusterService clusterService; + private static class TestShardStateAction extends ShardStateAction { + public TestShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { + super(settings, clusterService, transportService, allocationService, routingService); + } + + private Runnable onBeforeWaitForNewMasterAndRetry; + + public void setOnBeforeWaitForNewMasterAndRetry(Runnable onBeforeWaitForNewMasterAndRetry) { + this.onBeforeWaitForNewMasterAndRetry = onBeforeWaitForNewMasterAndRetry; + } + + private Runnable onAfterWaitForNewMasterAndRetry; + + public void setOnAfterWaitForNewMasterAndRetry(Runnable onAfterWaitForNewMasterAndRetry) { + this.onAfterWaitForNewMasterAndRetry = onAfterWaitForNewMasterAndRetry; + } + + @Override + protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardRoutingEntry shardRoutingEntry, Listener listener) { + onBeforeWaitForNewMasterAndRetry.run(); + super.waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); + onAfterWaitForNewMasterAndRetry.run(); + } + } + @BeforeClass public static void startThreadPool() { THREAD_POOL = new ThreadPool("ShardStateActionTest"); @@ -68,7 +108,9 @@ public class ShardStateActionTests extends ESTestCase { clusterService = new TestClusterService(THREAD_POOL); transportService = new TransportService(transport, THREAD_POOL); transportService.start(); - shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); + shardStateAction = new TestShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); + shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> {}); + shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> {}); } @Override @@ -84,36 +126,144 @@ public class ShardStateActionTests extends ESTestCase { THREAD_POOL = null; } - public void testNoMaster() { + public void testSuccess() throws InterruptedException { final String index = "test"; clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); - DiscoveryNodes.Builder builder = DiscoveryNodes.builder(clusterService.state().nodes()); - builder.masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder)); - String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); - AtomicBoolean noMaster = new AtomicBoolean(); - assert !noMaster.get(); + AtomicBoolean success = new AtomicBoolean(); + CountDownLatch latch = new CountDownLatch(1); - shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + ShardRouting shardRouting = getRandomShardRouting(index); + shardStateAction.shardFailed(shardRouting, indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override - public void onShardFailedNoMaster() { - noMaster.set(true); + public void onSuccess() { + success.set(true); + latch.countDown(); } @Override - public void onShardFailedFailure(DiscoveryNode master, TransportException e) { - + public void onFailure(Throwable t) { + success.set(false); + latch.countDown(); + assert false; } }); - assertTrue(noMaster.get()); + CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertEquals(1, capturedRequests.length); + // the request is a shard 
failed request + assertThat(capturedRequests[0].request, is(instanceOf(ShardStateAction.ShardRoutingEntry.class))); + ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry)capturedRequests[0].request; + // for the right shard + assertEquals(shardRouting, shardRoutingEntry.getShardRouting()); + // sent to the master + assertEquals(clusterService.state().nodes().masterNode().getId(), capturedRequests[0].node.getId()); + + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + + latch.await(); + assertTrue(success.get()); } - public void testFailure() { + public void testNoMaster() throws InterruptedException { + final String index = "test"; + + clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); + + DiscoveryNodes.Builder noMasterBuilder = DiscoveryNodes.builder(clusterService.state().nodes()); + noMasterBuilder.masterNodeId(null); + clusterService.setState(ClusterState.builder(clusterService.state()).nodes(noMasterBuilder)); + + String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); + + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger retries = new AtomicInteger(); + AtomicBoolean success = new AtomicBoolean(); + + setUpMasterRetryVerification(1, retries, latch, requestId -> {}); + + shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable e) { + success.set(false); + latch.countDown(); + assert false; + } + }); + + latch.await(); + + assertThat(retries.get(), equalTo(1)); + assertTrue(success.get()); + } + + public void testMasterChannelException() throws InterruptedException { + final String index = "test"; + + clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); + + String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); + + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger retries = new AtomicInteger(); + AtomicBoolean success = new AtomicBoolean(); + AtomicReference<Throwable> throwable = new AtomicReference<>(); + + LongConsumer retryLoop = requestId -> { + if (randomBoolean()) { + transport.handleRemoteError( + requestId, + randomFrom(new NotMasterException("simulated"), new Discovery.FailedToCommitClusterStateException("simulated"))); + } else { + if (randomBoolean()) { + transport.handleLocalError(requestId, new NodeNotConnectedException(null, "simulated")); + } else { + transport.handleError(requestId, new NodeDisconnectedException(null, ShardStateAction.SHARD_FAILED_ACTION_NAME)); + } + } + }; + + final int numberOfRetries = randomIntBetween(1, 256); + setUpMasterRetryVerification(numberOfRetries, retries, latch, retryLoop); + + shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable t) { + success.set(false); + throwable.set(t); + latch.countDown(); + assert false; + } + }); + + final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertThat(capturedRequests.length, equalTo(1)); + assertFalse(success.get()); + assertThat(retries.get(), equalTo(0)); + retryLoop.accept(capturedRequests[0].requestId); + + latch.await(); +
assertNull(throwable.get()); + assertThat(retries.get(), equalTo(numberOfRetries)); + assertTrue(success.get()); + } + + public void testUnhandledFailure() { final String index = "test"; clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); @@ -121,59 +271,28 @@ public class ShardStateActionTests extends ESTestCase { String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); AtomicBoolean failure = new AtomicBoolean(); - assert !failure.get(); - shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override - public void onShardFailedNoMaster() { - + public void onSuccess() { + failure.set(false); + assert false; } @Override - public void onShardFailedFailure(DiscoveryNode master, TransportException e) { + public void onFailure(Throwable t) { failure.set(true); } }); final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); assertThat(capturedRequests.length, equalTo(1)); - assert !failure.get(); - transport.handleResponse(capturedRequests[0].requestId, new TransportException("simulated")); + assertFalse(failure.get()); + transport.handleRemoteError(capturedRequests[0].requestId, new TransportException("simulated")); assertTrue(failure.get()); } - public void testTimeout() throws InterruptedException { - final String index = "test"; - - clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); - - String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); - - AtomicBoolean progress = new AtomicBoolean(); - AtomicBoolean timedOut = new AtomicBoolean(); - - TimeValue timeout = new TimeValue(1, TimeUnit.MILLISECONDS); - CountDownLatch latch = new CountDownLatch(1); - shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), timeout, new ShardStateAction.Listener() { - @Override - public void onShardFailedFailure(DiscoveryNode master, TransportException e) { - if (e instanceof ReceiveTimeoutTransportException) { - assertFalse(progress.get()); - timedOut.set(true); - } - latch.countDown(); - } - }); - - latch.await(); - progress.set(true); - assertTrue(timedOut.get()); - - final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); - assertThat(capturedRequests.length, equalTo(1)); - } - private ShardRouting getRandomShardRouting(String index) { IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index); ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt(); @@ -182,6 +301,34 @@ public class ShardStateActionTests extends ESTestCase { return shardRouting; } + private void setUpMasterRetryVerification(int numberOfRetries, AtomicInteger retries, CountDownLatch latch, LongConsumer retryLoop) { + shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> { + DiscoveryNodes.Builder masterBuilder = DiscoveryNodes.builder(clusterService.state().nodes()); + masterBuilder.masterNodeId(clusterService.state().nodes().masterNodes().iterator().next().value.id()); + clusterService.setState(ClusterState.builder(clusterService.state()).nodes(masterBuilder)); + }); + + shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> verifyRetry(numberOfRetries, retries, latch, retryLoop)); + 
} + + private void verifyRetry(int numberOfRetries, AtomicInteger retries, CountDownLatch latch, LongConsumer retryLoop) { + // assert a retry request was sent + final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + if (capturedRequests.length == 1) { + retries.incrementAndGet(); + if (retries.get() == numberOfRetries) { + // finish the request + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + } else { + retryLoop.accept(capturedRequests[0].requestId); + } + } else { + // there failed to be a retry request + // release the driver thread to fail the test + latch.countDown(); + } + } + private Throwable getSimulatedFailure() { return new CorruptIndexException("simulated", (String) null); } diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 6b406a3bfdf..6835f343563 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; -import org.elasticsearch.cluster.routing.allocation.command.AllocateAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; @@ -100,7 +100,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, *under dry_run*"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .setDryRun(true) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); @@ -113,7 +113,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, actually allocating, no dry run"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -212,7 +212,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, actually allocating, no dry run"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); 
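// Aside: a condensed sketch of the error-channel distinction that the retry tests above rely
// on. CapturingTransport records outbound requests instead of sending them, and the test then
// completes each captured request by hand. handleRemoteError simulates a failure on the
// receiving node (the master processed the request and rejected it), while handleLocalError
// simulates a failure before the request ever left the sender; retry logic may treat the two
// differently. This sketch assumes the transport field and helpers of the tests above:
CapturingTransport.CapturedRequest[] sent = transport.getCapturedRequestsAndClear();
if (randomBoolean()) {
    // the master saw the request and failed it
    transport.handleRemoteError(sent[0].requestId, new NotMasterException("simulated"));
} else {
    // the request never made it off the local node
    transport.handleLocalError(sent[0].requestId, new NodeNotConnectedException(null, "simulated"));
}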
assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -246,7 +246,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate primary"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java new file mode 100644 index 00000000000..32312f34e21 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +public class AutoExpandReplicasTests extends ESTestCase { + + public void testParseSettings() { + AutoExpandReplicas autoExpandReplicas = AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "0-5").build()); + assertEquals(0, autoExpandReplicas.getMinReplicas()); + assertEquals(5, autoExpandReplicas.getMaxReplicas(8)); + assertEquals(2, autoExpandReplicas.getMaxReplicas(3)); + + autoExpandReplicas = AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "0-all").build()); + assertEquals(0, autoExpandReplicas.getMinReplicas()); + assertEquals(5, autoExpandReplicas.getMaxReplicas(6)); + assertEquals(2, autoExpandReplicas.getMaxReplicas(3)); + + autoExpandReplicas = AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "1-all").build()); + assertEquals(1, autoExpandReplicas.getMinReplicas()); + assertEquals(5, autoExpandReplicas.getMaxReplicas(6)); + assertEquals(2, autoExpandReplicas.getMaxReplicas(3)); + + } + + public void testInvalidValues() { + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "boom").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse [index.auto_expand_replicas] from value: [boom] at index -1", ex.getMessage()); + } + + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "1-boom").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse [index.auto_expand_replicas] from value: [1-boom] at index 1", ex.getMessage()); + assertEquals("For input string: \"boom\"", ex.getCause().getMessage()); + } + + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "boom-1").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse [index.auto_expand_replicas] from value: [boom-1] at index 4", ex.getMessage()); + assertEquals("For input string: \"boom\"", ex.getCause().getMessage()); + } + + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "2-1").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("[index.auto_expand_replicas] minReplicas must be =< maxReplicas but wasn't 2 > 1", ex.getMessage()); + } + + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java index 2d704380ae0..169e68f4637 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java @@ -47,7 +47,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0)).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0)).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -66,7 +66,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -90,14 +90,14 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); ensureGreen("test"); internalCluster().startNode(); // do a second round with longer delay to make sure it happens - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get()); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); ensureGreen("test"); } @@ -112,7 +112,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -123,7 +123,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { } }); assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1)); - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get()); ensureGreen("test"); assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0)); } @@ -138,7 +138,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -149,7 +149,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { } }); 
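// Aside: the ".getKey()" churn in this file reflects INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING
// becoming a typed Setting<TimeValue> rather than a bare String constant. A minimal sketch of
// the new shape; the timeSetting factory and the default shown here are assumptions (only
// Setting.boolSetting appears verbatim in this patch):
public void testTypedSettingRoundTrip() {
    Setting<TimeValue> delayedTimeout = Setting.timeSetting(
            "index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), true, Setting.Scope.INDEX);
    Settings indexSettings = Settings.builder().put(delayedTimeout.getKey(), "100ms").build();
    assertEquals(TimeValue.timeValueMillis(100), delayedTimeout.get(indexSettings)); // typed, validated read
}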
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1)); - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(0))).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(0))).get()); ensureGreen("test"); assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0)); } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index dc468ded962..9203b270b2f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -19,10 +19,17 @@ package org.elasticsearch.cluster.routing; * under the License. */ +import com.carrotsearch.hppc.cursors.IntObjectCursor; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.disruption.NetworkDisconnectPartition; @@ -33,11 +40,13 @@ import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) @@ -50,7 +59,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase { return pluginList(MockTransportService.TestPlugin.class); } - public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { + private void createStaleReplicaScenario() throws Exception { logger.info("--> starting 3 nodes, 1 master, 2 data"); String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); internalCluster().startDataOnlyNodesAsync(2).get(); @@ -103,6 +112,10 @@ public class PrimaryAllocationIT extends ESIntegTestCase { assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0))); // kick reroute a second time and check that all shards are unassigned assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2)); + } + + public 
void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { + createStaleReplicaScenario(); logger.info("--> starting node that reuses data folder with the up-to-date primary shard"); internalCluster().startDataOnlyNode(Settings.EMPTY); @@ -112,6 +125,67 @@ public class PrimaryAllocationIT extends ESIntegTestCase { assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); } + public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exception { + String dataNodeWithShardCopy = internalCluster().startNode(); + + logger.info("--> create single shard index"); + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get()); + ensureGreen("test"); + + String dataNodeWithNoShardCopy = internalCluster().startNode(); + ensureStableCluster(2); + + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(dataNodeWithShardCopy)); + ensureStableCluster(1); + assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT)); + + logger.info("--> force allocation of stale copy to node that does not have shard copy"); + client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", 0), dataNodeWithNoShardCopy, true)).get(); + + logger.info("--> wait until shard is failed and becomes unassigned again"); + assertBusy(() -> assertTrue(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").allPrimaryShardsUnassigned())); + assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + } + + public void testForceStaleReplicaToBePromotedToPrimary() throws Exception { + boolean useStaleReplica = randomBoolean(); // if true, use stale replica, otherwise a completely empty copy + createStaleReplicaScenario(); + + logger.info("--> explicitly promote old primary shard"); + ImmutableOpenIntMap<List<IndicesShardStoresResponse.StoreStatus>> storeStatuses = client().admin().indices().prepareShardStores("test").get().getStoreStatuses().get("test"); + ClusterRerouteRequestBuilder rerouteBuilder = client().admin().cluster().prepareReroute(); + for (IntObjectCursor<List<IndicesShardStoresResponse.StoreStatus>> shardStoreStatuses : storeStatuses) { + int shardId = shardStoreStatuses.key; + IndicesShardStoresResponse.StoreStatus storeStatus = randomFrom(shardStoreStatuses.value); + logger.info("--> adding allocation command for shard " + shardId); + // force allocation based on node id + if (useStaleReplica) { + rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true)); + } else { + rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true)); + } + } + rerouteBuilder.get(); + + logger.info("--> check that the stale primary shard gets allocated and that documents are available"); + ensureYellow("test"); + + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(), useStaleReplica ?
1l : 0l); + } + + public void testForcePrimaryShardIfAllocationDecidersSayNoAfterIndexCreation() throws ExecutionException, InterruptedException { + String node = internalCluster().startNode(); + client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.routing.allocation.exclude._name", node) + .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get(); + + assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().shardRoutingTable("test", 0).assignedShards(), empty()); + + client().admin().cluster().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node, true)).get(); + ensureGreen("test"); + } + public void testNotWaitForQuorumCopies() throws Exception { logger.info("--> starting 3 nodes"); internalCluster().startNodesAsync(3).get(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java index 1711b0c33a8..e50272d2b08 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java @@ -69,7 +69,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { public void testNoDelayedUnassigned() throws Exception { AllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0")) + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0")) .numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -97,7 +97,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { public void testDelayedUnassignedScheduleReroute() throws Exception { MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms")) + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")) .numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -144,9 +144,9 @@ public class RoutingServiceTests extends ESAllocationTestCase { try { MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms")) + .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")) .numberOfShards(1).numberOfReplicas(1)) - .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10s")) + .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10s")) 
.numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 20a731b0153..e277080ebf7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -259,7 +259,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { public void testUnassignedDelayedOnlyOnNodeLeft() throws Exception { final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, null); long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY); long cachedDelay = unassignedInfo.getLastComputedLeftDelayNanos(); assertThat(delay, equalTo(cachedDelay)); assertThat(delay, equalTo(TimeValue.timeValueHours(10).nanos() - 1)); @@ -273,7 +273,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { reasons.remove(UnassignedInfo.Reason.NODE_LEFT); UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null); long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY); assertThat(delay, equalTo(0l)); delay = unassignedInfo.getLastComputedLeftDelayNanos(); assertThat(delay, equalTo(0l)); @@ -286,7 +286,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { final long baseTime = System.nanoTime(); final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, baseTime, System.currentTimeMillis()); final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos(); - final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueNanos(totalDelayNanos)).build(); + final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build(); long delay = unassignedInfo.updateDelay(baseTime, settings, Settings.EMPTY); assertThat(delay, equalTo(totalDelayNanos)); assertThat(delay, equalTo(unassignedInfo.getLastComputedLeftDelayNanos())); @@ -336,8 +336,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase { final long expectMinDelaySettingsNanos = Math.min(delayTest1.nanos(), delayTest2.nanos()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, delayTest1)).numberOfShards(1).numberOfReplicas(1)) - .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, delayTest2)).numberOfShards(1).numberOfReplicas(1)) + 
.put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest1)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest2)).numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .metaData(metaData) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 4c4fa72a6ec..65c2e577015 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -26,7 +26,10 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.allocation.command.AllocateAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; @@ -38,7 +41,9 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.test.ESAllocationTestCase; import static java.util.Collections.singletonMap; @@ -46,6 +51,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** @@ -96,6 +102,14 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node(toNodeId).get(0).state(), equalTo(ShardRoutingState.STARTED)); } + private AbstractAllocateAllocationCommand randomAllocateCommand(ShardId shardId, String node) { + return randomFrom( + new AllocateReplicaAllocationCommand(shardId, node), + new AllocateEmptyPrimaryAllocationCommand(shardId, node, true), + new AllocateStalePrimaryAllocationCommand(shardId, node, true) + ); + } + public void testAllocateCommand() { AllocationService allocation = createAllocationService(settingsBuilder() .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") @@ -106,10 +120,13 
@@ public class AllocationCommandsTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); + // shard routing is added as "from recovery" instead of "new index creation" so that we can test below that allocating an empty + // primary with accept_data_loss flag set to false fails RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) + .addAsRecovery(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + ShardId shardId = new ShardId("test", 0); logger.info("--> adding 3 nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() @@ -122,22 +139,56 @@ public class AllocationCommandsTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); - logger.info("--> allocating with primary flag set to false, should fail"); + logger.info("--> allocating to non-existent node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", false))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node42"))); + fail("expected IllegalArgumentException when allocating to non-existing node"); } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("failed to resolve [node42], no matching nodes")); } logger.info("--> allocating to non-data node, should fail"); try { - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node4", true))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node4"))); + fail("expected IllegalArgumentException when allocating to non-data node"); } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("allocation can only be done on data nodes")); } - logger.info("--> allocating with primary flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", true))); + logger.info("--> allocating non-existing shard, should fail"); + try { + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 1), "node2"))); + fail("expected ShardNotFoundException when allocating non-existing shard"); + } catch (ShardNotFoundException e) { + assertThat(e.getMessage(), containsString("no such shard")); + } + + logger.info("--> allocating non-existing index, should fail"); + try { + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test2", 0), "node2"))); + fail("expected ShardNotFoundException when allocating non-existing index"); + } catch (IndexNotFoundException e) { + assertThat(e.getMessage(), containsString("no such index")); + } + + logger.info("--> allocating empty primary with acceptDataLoss flag set to false"); + try { + allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", false))); + fail("expected 
IllegalArgumentException when allocating empty primary with acceptDataLoss flag set to false"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true")); + } + + logger.info("--> allocating stale primary with acceptDataLoss flag set to false"); + try { + allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(shardId, "node1", false))); + fail("expected IllegalArgumentException when allocating stale primary with acceptDataLoss flag set to false"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true")); + } + + logger.info("--> allocating empty primary with acceptDataLoss flag set to true"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -153,13 +204,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocate the replica shard on the primary shard node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", false))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node1"))); + fail("expected IllegalArgumentException when allocating replica shard on the primary shard node"); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -178,8 +229,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> verify that we fail when there are no unassigned shards"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node3", false))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 0), "node3"))); + fail("expected IllegalArgumentException when allocating shard while no unassigned shard available"); } catch (IllegalArgumentException e) { } } @@ -209,8 +260,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); - logger.info("--> allocating with primary flag set to true"); - rerouteResult = allocation.reroute(clusterState, new 
AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", true))); + logger.info("--> allocating empty primary shard with accept_data_loss flag set to true"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -239,7 +290,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -257,7 +308,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -290,7 +341,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -335,7 +386,9 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testSerialization() throws Exception { AllocationCommands commands = new AllocationCommands( - new AllocateAllocationCommand(new ShardId("test", 1), "node1", true), + new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 1), "node1", true), + new AllocateStalePrimaryAllocationCommand(new ShardId("test", 2), "node1", true), + new AllocateReplicaAllocationCommand(new ShardId("test", 2), "node1"), new MoveAllocationCommand(new ShardId("test", 3), "node2", "node3"), new CancelAllocationCommand(new ShardId("test", 4), "node5", true) ); @@ -343,24 +396,33 @@ public class AllocationCommandsTests extends ESAllocationTestCase { AllocationCommands.writeTo(commands, bytes); 
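+ // round-trip the commands through the wire format; the assertions below then verify that the count, order, and per-command fields all survive serialization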
AllocationCommands sCommands = AllocationCommands.readFrom(StreamInput.wrap(bytes.bytes())); - assertThat(sCommands.commands().size(), equalTo(3)); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).allowPrimary(), equalTo(true)); + assertThat(sCommands.commands().size(), equalTo(5)); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true)); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 3))); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).fromNode(), equalTo("node2")); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).toNode(), equalTo("node3")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true)); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 4))); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node5")); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).allowPrimary(), equalTo(true)); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1")); + + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3))); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2")); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3")); + + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4))); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5")); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true)); } public void testXContent() throws Exception { String commands = "{\n" + " \"commands\" : [\n" + - " {\"allocate\" : {\"index\" : \"test\", \"shard\" : 1, \"node\" : \"node1\", \"allow_primary\" : true}}\n" + + " {\"allocate_empty_primary\" : {\"index\" : \"test\", \"shard\" : 1, \"node\" : \"node1\", \"accept_data_loss\" : true}}\n" + + " ,{\"allocate_stale_primary\" : {\"index\" : \"test\", \"shard\" : 2, \"node\" : \"node1\", \"accept_data_loss\" : true}}\n" + + " ,{\"allocate_replica\" : {\"index\" : \"test\", \"shard\" : 2, \"node\" : \"node1\"}}\n" + " ,{\"move\" : {\"index\" : \"test\", \"shard\" : 3, \"from_node\" : \"node2\", \"to_node\" : \"node3\"}} \n" + " ,{\"cancel\" : {\"index\" : \"test\", \"shard\" : 4, \"node\" : \"node5\", \"allow_primary\" : true}} \n" + " ]\n" + @@ -371,17 
+433,24 @@ public class AllocationCommandsTests extends ESAllocationTestCase { parser.nextToken(); AllocationCommands sCommands = AllocationCommands.fromXContent(parser); - assertThat(sCommands.commands().size(), equalTo(3)); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).allowPrimary(), equalTo(true)); + assertThat(sCommands.commands().size(), equalTo(5)); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true)); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 3))); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).fromNode(), equalTo("node2")); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).toNode(), equalTo("node3")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true)); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 4))); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node5")); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).allowPrimary(), equalTo(true)); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1")); + + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3))); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2")); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3")); + + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4))); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5")); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true)); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index fa9f4065dc1..827da901dc9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -753,7 +753,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) - 
.put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_id", "node1,node2")).numberOfShards(2).numberOfReplicas(0)) + .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_id", "node1,node2")).numberOfShards(2).numberOfReplicas(0)) .build(); // we use a second index here (test1) that never gets assigned otherwise allocateUnassinged is never called if we don't have unassigned shards. diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index dd3f3f373ff..ac539c421b0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -54,7 +54,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 2))) + .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2))) .build(); RoutingTable routingTable = RoutingTable.builder() @@ -212,12 +212,12 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { assertThat(shardRouting.index(), equalTo("test1")); } - logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE + " for test, see that things move"); + logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() + " for test, see that things move"); metaData = MetaData.builder(metaData) .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 3) + .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 3) )) .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java index be64aafc61e..ae840abd1a8 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java @@ -49,14 +49,14 @@ public class EnableAllocationDeciderIT extends ESIntegTestCase { final String secondNode = internalCluster().startNode(); // prevent via index setting but only on index test - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)).get(); client().admin().cluster().prepareReroute().get(); ensureGreen(); assertAllShardsOnNodes("test", firstNode); assertAllShardsOnNodes("test_1", firstNode); // now enable the index test to relocate since index settings override 
cluster settings - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); logger.info("--> balance index [test]"); client().admin().cluster().prepareReroute().get(); ensureGreen("test"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index b2559c29ed2..62005a3fa1d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -44,7 +44,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; @@ -121,7 +121,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("disabled").settings(settings(Version.CURRENT) - .put(INDEX_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name())) + .put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name())) .numberOfShards(1).numberOfReplicas(1)) .put(IndexMetaData.builder("enabled").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); @@ -163,12 +163,12 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); - Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); + Settings indexSettings = useClusterSetting ? 
Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build(); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(indexSettings)).numberOfShards(3).numberOfReplicas(1)) - .put(IndexMetaData.builder("always_disabled").settings(settings(Version.CURRENT).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder("always_disabled").settings(settings(Version.CURRENT).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE)).numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable routingTable = RoutingTable.builder() @@ -219,7 +219,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { IndexMetaData meta = clusterState.getMetaData().index("test"); IndexMetaData meta1 = clusterState.getMetaData().index("always_disabled"); clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices().put(IndexMetaData.builder(meta1)) - .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, allowedOnes).build()))) + .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), allowedOnes).build()))) .build(); } @@ -265,7 +265,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); - Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); + Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build(); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() @@ -312,7 +312,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { prevState = clusterState; IndexMetaData meta = clusterState.getMetaData().index("test"); clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices() - .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); + .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? 
Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); } clusterSettings.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index f2a9a641a80..80df54518ba 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -265,7 +265,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { .get(); fail("bogus value"); } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.commit_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); + assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.publish_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); } assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index 6a16f906886..97e527d1a0a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -23,8 +23,10 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -62,6 +64,10 @@ public class SettingsFilteringIT extends ESIntegTestCase { return "Settings Filtering Plugin"; } + public void onModule(SettingsModule module) { + module.registerSetting(Setting.groupSetting("index.filter_test.", false, Setting.Scope.INDEX)); + } + @Override public Collection<Module> nodeModules() { return Collections.singletonList(new SettingsFilteringModule()); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsValidatorTests.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsValidatorTests.java deleted file mode 100644 index 498acef2eb9..00000000000 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsValidatorTests.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.settings; - -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class SettingsValidatorTests extends ESTestCase { - public void testValidators() throws Exception { - assertThat(Validator.EMPTY.validate("", "anything goes", null), nullValue()); - - assertThat(Validator.TIME.validate("", "10m", null), nullValue()); - assertThat(Validator.TIME.validate("", "10g", null), notNullValue()); - assertThat(Validator.TIME.validate("", "bad timing", null), notNullValue()); - - assertThat(Validator.BYTES_SIZE.validate("", "10m", null), nullValue()); - assertThat(Validator.BYTES_SIZE.validate("", "10g", null), nullValue()); - assertThat(Validator.BYTES_SIZE.validate("", "bad", null), notNullValue()); - - assertThat(Validator.FLOAT.validate("", "10.2", null), nullValue()); - assertThat(Validator.FLOAT.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "10.2", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "0.0", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "-1.0", null), notNullValue()); - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.DOUBLE.validate("", "10.2", null), nullValue()); - assertThat(Validator.DOUBLE.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.DOUBLE_GTE_2.validate("", "10.2", null), nullValue()); - assertThat(Validator.DOUBLE_GTE_2.validate("", "2.0", null), nullValue()); - assertThat(Validator.DOUBLE_GTE_2.validate("", "1.0", null), notNullValue()); - assertThat(Validator.DOUBLE_GTE_2.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "10.2", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "0.0", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "-1.0", null), notNullValue()); - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.INTEGER.validate("", "10", null), nullValue()); - assertThat(Validator.INTEGER.validate("", "10.2", null), notNullValue()); - - assertThat(Validator.INTEGER_GTE_2.validate("", "2", null), nullValue()); - assertThat(Validator.INTEGER_GTE_2.validate("", "1", null), notNullValue()); - assertThat(Validator.INTEGER_GTE_2.validate("", "0", null), notNullValue()); - assertThat(Validator.INTEGER_GTE_2.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "2", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "1", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "0", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "-1", null), notNullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "10.2", null), notNullValue()); - - assertThat(Validator.POSITIVE_INTEGER.validate("", "2", null), nullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "1", null), nullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "0", null), notNullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "-1", null), notNullValue()); - 
assertThat(Validator.POSITIVE_INTEGER.validate("", "10.2", null), notNullValue()); - - assertThat(Validator.PERCENTAGE.validate("", "asdasd", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "-1", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "20", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "-1%", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "101%", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "100%", null), nullValue()); - assertThat(Validator.PERCENTAGE.validate("", "99%", null), nullValue()); - assertThat(Validator.PERCENTAGE.validate("", "0%", null), nullValue()); - - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "asdasd", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "20", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "20mb", null), nullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "-1%", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "101%", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "100%", null), nullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "99%", null), nullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "0%", null), nullValue()); - } - - public void testDynamicValidators() throws Exception { - DynamicSettings.Builder ds = new DynamicSettings.Builder(); - ds.addSetting("my.test.*", Validator.POSITIVE_INTEGER); - String valid = ds.build().validateDynamicSetting("my.test.setting", "-1", null); - assertThat(valid, equalTo("the value of the setting my.test.setting must be a positive integer")); - } -} diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java index e5d27b872fb..ca33288e874 100644 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java @@ -28,16 +28,24 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.junit.Assert; import java.io.IOException; +import java.util.Collection; import static org.hamcrest.Matchers.containsString; public class CodecTests extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testAcceptPostingsFormat() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").field("postings_format", Codec.getDefault().postingsFormat().getName()).endObject().endObject() diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java index 8f8e8b4ea1c..b10763a95b0 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java @@ -34,6 +34,7 @@ import static
org.hamcrest.Matchers.lessThan; * Basic Tests for {@link GeoDistance} */ public class GeoDistanceTests extends ESTestCase { + public void testGeoDistanceSerialization() throws IOException { // make sure that ordinals don't change, because we rely on then in serialization assertThat(GeoDistance.PLANE.ordinal(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 03294b8e49c..279e31aadd4 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> extends ESTestCase { - private static final int NUMBER_OF_TESTBUILDERS = 100; + private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; /** diff --git a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java index 61660f96742..0c14e1a0bcb 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java @@ -22,8 +22,10 @@ package org.elasticsearch.common.lucene; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.index.shard.ShardId; @@ -55,6 +57,25 @@ public class ShardCoreKeyMapTests extends ESTestCase { } } + public void testAddingAClosedReader() throws Exception { + LeafReader reader; + try (Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { + writer.addDocument(new Document()); + try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", 1))) { + reader = dirReader.leaves().get(0).reader(); + } + } + ShardCoreKeyMap map = new ShardCoreKeyMap(); + try { + map.add(reader); + fail("Expected AlreadyClosedException"); + } catch (AlreadyClosedException e) { + // What we wanted + } + assertEquals(0, map.size()); + } + public void testBasics() throws IOException { Directory dir1 = newDirectory(); RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 798e82a979e..82cabf7ec58 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -104,36 +104,36 @@ public class NetworkModuleTests extends ModuleTestCase { public void testRegisterTransportService() { Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom").build(); - NetworkModule module = new
NetworkModule(new NetworkService(settings), settings, false, null); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); // check it works with transport only as well - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); } public void testRegisterTransport() { Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); // check it works with transport only as well - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); } public void testRegisterHttpTransport() { Settings settings = Settings.builder().put(NetworkModule.HTTP_TYPE_KEY, "custom").build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerHttpTransport("custom", FakeHttpTransport.class); assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class); // check registration not allowed for transport only - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); try { module.registerHttpTransport("custom", FakeHttpTransport.class); fail(); @@ -144,19 +144,19 @@ public class NetworkModuleTests extends ModuleTestCase { // not added if http is disabled settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build(); - module = new NetworkModule(new NetworkService(settings), settings, false); + module = new NetworkModule(new NetworkService(settings), settings, false, null); assertNotBound(module, HttpServerTransport.class); } public void testRegisterRestHandler() { Settings settings = Settings.EMPTY; - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerRestHandler(FakeRestHandler.class); // also check a builtin is bound assertSetMultiBinding(module, RestHandler.class, FakeRestHandler.class, RestMainAction.class); // check registration not allowed for transport only - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); try { module.registerRestHandler(FakeRestHandler.class); fail(); @@ -168,7 +168,7 @@ public class NetworkModuleTests extends ModuleTestCase { public void testRegisterCatRestHandler() { Settings settings = Settings.EMPTY; - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); 
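+ // the trailing null stands in for the constructor argument introduced by this change; none of these module-binding tests exercise it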
module.registerRestHandler(FakeCatRestHandler.class); // also check a builtin is bound assertSetMultiBinding(module, AbstractCatAction.class, FakeCatRestHandler.class, RestNodesAction.class); diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 97393c51b8d..088d4fa5ac6 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -18,8 +18,11 @@ */ package org.elasticsearch.common.settings; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; @@ -165,4 +168,61 @@ public class ScopedSettingsTests extends ESTestCase { assertTrue(ref.get().contains("internal:index/shard/recovery/*")); assertTrue(ref.get().contains("internal:gateway/local*")); } + + public void testGetSetting() { + IndexScopedSettings settings = new IndexScopedSettings( + Settings.EMPTY, + IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + IndexScopedSettings copy = settings.copy(Settings.builder().put("index.store.type", "boom").build(), newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 3).build())); + assertEquals(3, copy.get(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING).intValue()); + assertEquals(1, copy.get(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING).intValue()); + assertEquals("boom", copy.get(IndexModule.INDEX_STORE_TYPE_SETTING)); // test fallback to node settings + } + + public void testValidate() { + IndexScopedSettings settings = new IndexScopedSettings( + Settings.EMPTY, + IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + settings.validate(Settings.builder().put("index.store.type", "boom")); + settings.validate(Settings.builder().put("index.store.type", "boom").build()); + try { + settings.validate(Settings.builder().put("index.store.type", "boom", "i.am.not.a.setting", true)); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("unknown setting [i.am.not.a.setting]", e.getMessage()); + } + + try { + settings.validate(Settings.builder().put("index.store.type", "boom", "i.am.not.a.setting", true).build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("unknown setting [i.am.not.a.setting]", e.getMessage()); + } + + try { + settings.validate(Settings.builder().put("index.store.type", "boom", "index.number_of_replicas", true).build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); + } + + try { + settings.validate("index.number_of_replicas", Settings.builder().put("index.number_of_replicas", "true").build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); + } + } + + + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + 
.put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } + } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 71914444725..cccfa373200 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -45,7 +45,7 @@ public class SettingTests extends ESTestCase { ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); AtomicReference<ByteSizeValue> value = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger); + ClusterSettings.SettingUpdater<ByteSizeValue> settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger); try { settingUpdater.apply(Settings.builder().put("a.byte.size", 12).build(), Settings.EMPTY); fail("no unit"); @@ -60,7 +60,7 @@ public void testSimpleUpdate() { Setting<Boolean> booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference<Boolean> atomicBoolean = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); + ClusterSettings.SettingUpdater<Boolean> settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); Settings build = Settings.builder().put("foo.bar", false).build(); settingUpdater.apply(build, Settings.EMPTY); assertNull(atomicBoolean.get()); @@ -94,8 +94,7 @@ Setting<Boolean> booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference<Boolean> ab1 = new AtomicReference<>(null); AtomicReference<Boolean> ab2 = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); - ClusterSettings.SettingUpdater settingUpdater2 = booleanSetting.newUpdater(ab2::set, logger); + ClusterSettings.SettingUpdater<Boolean> settingUpdater = booleanSetting.newUpdater(ab1::set, logger); settingUpdater.apply(Settings.builder().put("foo.bar", true).build(), Settings.EMPTY); assertTrue(ab1.get()); assertNull(ab2.get()); @@ -120,7 +119,7 @@ assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); - ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + ClusterSettings.SettingUpdater<ComplexType> settingUpdater = setting.newUpdater(ref::set, logger); assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertSame("no update - type has not changed", type, ref.get()); @@ -147,7 +146,7 @@ AtomicReference<Settings> ref = new AtomicReference<>(null); Setting<Settings> setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.CLUSTER); assertTrue(setting.isGroupSetting()); - ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + ClusterSettings.SettingUpdater<Settings> settingUpdater = setting.newUpdater(ref::set, logger); Settings currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build(); Settings previousInput = Settings.EMPTY; @@ -191,7 +190,7 @@ assertTrue(setting.match("foo.bar.baz")); assertFalse(setting.match("foo.baz.bar")); - ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger,(s) -> assertFalse(true)); + ClusterSettings.SettingUpdater<Settings> predicateSettingUpdater = setting.newUpdater(ref::set, logger,(s) -> assertFalse(true)); try { predicateSettingUpdater.apply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build(), Settings.EMPTY); fail("not accepted"); @@ -273,7 +272,7 @@ assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0])); AtomicReference<List<String>> ref = new AtomicReference<>(); - AbstractScopedSettings.SettingUpdater settingUpdater = listSetting.newUpdater(ref::set, logger); + AbstractScopedSettings.SettingUpdater<List<String>> settingUpdater = listSetting.newUpdater(ref::set, logger); assertTrue(settingUpdater.hasChanged(builder.build(), Settings.EMPTY)); settingUpdater.apply(builder.build(), Settings.EMPTY); assertEquals(input.size(), ref.get().size()); diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java index df65d647265..b0bbf0f47f2 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java @@ -25,14 +25,17 @@ import org.elasticsearch.test.ESTestCase; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + public class PrioritizedRunnableTests extends ESTestCase { + + // test unit conversion with a controlled clock public void testGetAgeInMillis() throws Exception { AtomicLong time = new AtomicLong(); PrioritizedRunnable runnable = new PrioritizedRunnable(Priority.NORMAL, time::get) { @Override public void run() { - } }; assertEquals(0, runnable.getAgeInMillis()); @@ -40,4 +43,27 @@ time.addAndGet(TimeUnit.NANOSECONDS.convert(milliseconds, TimeUnit.MILLISECONDS)); assertEquals(milliseconds, runnable.getAgeInMillis()); } + + // test age advances with System#nanoTime + public void testGetAgeInMillisWithRealClock() throws InterruptedException { + long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); + PrioritizedRunnable runnable = new PrioritizedRunnable(Priority.NORMAL) { + @Override + public void run() { + } + }; + + // force at least one millisecond to elapse, but ensure the + // clock has enough resolution to observe the passage of time + long start = System.nanoTime(); + long elapsed; + while ((elapsed = (System.nanoTime() - start)) < nanosecondsInMillisecond) { + // busy spin + } + + // creation happened before start, so age will be at least as + // large as elapsed + assertThat(runnable.getAgeInMillis(), greaterThanOrEqualTo(TimeUnit.MILLISECONDS.convert(elapsed, TimeUnit.NANOSECONDS))); + } + } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 67d3df42b38..e9fa8e495d9 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.discovery; +import org.apache.lucene.index.CorruptIndexException; import
org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.get.GetResponse; @@ -30,12 +31,15 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.Murmur3HashFunction; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; @@ -96,6 +100,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -883,6 +888,71 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { internalCluster().stopRandomNonMasterNode(); } + // simulate handling of sending shard failure during an isolation + public void testSendingShardFailure() throws Exception { + List<String> nodes = startCluster(3, 2); + String masterNode = internalCluster().getMasterName(); + List<String> nonMasterNodes = nodes.stream().filter(node -> !node.equals(masterNode)).collect(Collectors.toList()); + String nonMasterNode = randomFrom(nonMasterNodes); + assertAcked(prepareCreate("test") + .setSettings(Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) + )); + ensureGreen(); + String nonMasterNodeId = internalCluster().clusterService(nonMasterNode).localNode().getId(); + + // fail a random shard + ShardRouting failedShard = + randomFrom(clusterService().state().getRoutingNodes().node(nonMasterNodeId).shardsWithState(ShardRoutingState.STARTED)); + ShardStateAction service = internalCluster().getInstance(ShardStateAction.class, nonMasterNode); + String indexUUID = clusterService().state().metaData().index("test").getIndexUUID(); + CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean success = new AtomicBoolean(); + + String isolatedNode = randomBoolean() ? masterNode : nonMasterNode; + NetworkPartition networkPartition = addRandomIsolation(isolatedNode); + networkPartition.startDisrupting(); + + service.shardFailed(failedShard, indexUUID, "simulated", new CorruptIndexException("simulated", (String) null), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable t) { + success.set(false); + latch.countDown(); + assert false; + } + }); + + if (isolatedNode.equals(nonMasterNode)) { + assertNoMaster(nonMasterNode); + } else { + ensureStableCluster(2, nonMasterNode); + } + + // heal the partition + networkPartition.removeAndEnsureHealthy(internalCluster()); + + // the cluster should stabilize + ensureStableCluster(3); + + latch.await(); + + // the listener should be notified + assertTrue(success.get()); + + // the failed shard should be gone + List<ShardRouting> shards = clusterService().state().getRoutingTable().allShards("test"); + for (ShardRouting shard : shards) { + assertThat(shard.allocationId(), not(equalTo(failedShard.allocationId()))); + } + } + public void testClusterFormingWithASlowNode() throws Exception { configureUnicastCluster(3, null, 2); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index 1c3ec79dd94..d2f7bb888cd 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -64,7 +64,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { String node2 = nodeName2.get(); String index = "index"; - assertAcked(prepareCreate(index).setSettings(Settings.builder().put("index.number_of_replicas", 0).put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "_name", node1))); + assertAcked(prepareCreate(index).setSettings(Settings.builder().put("index.number_of_replicas", 0).put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_name", node1))); index(index, "doc", "1", jsonBuilder().startObject().field("text", "some text").endObject()); ensureGreen(); assertIndexInMetaState(node1, index); @@ -72,7 +72,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { assertIndexInMetaState(masterNode, index); logger.debug("relocating index..."); - client().admin().indices().prepareUpdateSettings(index).setSettings(Settings.builder().put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "_name", node2)).get(); + client().admin().indices().prepareUpdateSettings(index).setSettings(Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_name", node2)).get(); client().admin().cluster().prepareHealth().setWaitForRelocatingShards(0).get(); ensureGreen(); assertIndexDirectoryDeleted(node1, index); diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java index dbdf747de63..87a10625c5b 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java @@ -60,7 +60,7 @@ public class RecoveryBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testReusePeerRecovery() throws Exception { assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS,
0) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE))); logger.info("--> indexing docs"); int numDocs = scaledRandomIntBetween(100, 1000); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index f0650a1cbda..d26f0fbf412 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -39,6 +40,8 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.store.MockFSIndexStore; +import java.util.Collection; + import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -54,6 +57,12 @@ import static org.hamcrest.Matchers.notNullValue; @ClusterScope(numDataNodes = 0, scope = Scope.TEST) public class RecoveryFromGatewayIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(MockFSIndexStore.TestPlugin.class); + } + public void testOneNodeRecoverFromGateway() throws Exception { internalCluster().startNode(); @@ -322,11 +331,11 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { public void testReusePeerRecovery() throws Exception { final Settings settings = settingsBuilder() .put("action.admin.cluster.node.shutdown.delay", "10ms") - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING, false) .put("gateway.recover_after_nodes", 4) .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 4) .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 4) - .put(MockFSDirectoryService.CRASH_INDEX, false).build(); + .put(MockFSDirectoryService.CRASH_INDEX_SETTING.getKey(), false).build(); internalCluster().startNodesAsync(4, settings).get(); // prevent any rebalance actions during the peer recovery @@ -334,7 +343,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { // we reuse the files on disk after full restarts for replicas.
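+ // INDEX_ROUTING_REBALANCE_ENABLE is now a typed Setting, so getKey() below supplies the raw string key expected by the settings builder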
assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE))); ensureGreen(); logger.info("--> indexing docs"); for (int i = 0; i < 1000; i++) { diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 0a6ddca5d24..87d83ed4181 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -228,7 +228,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testDelayedAllocation() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); if (randomBoolean()) { // we sometime return empty list of files, make sure we test this as well @@ -241,7 +241,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); testAllocator.addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); AllocationService.updateLeftDelayOfUnassignedShards(allocation, Settings.EMPTY); changed = testAllocator.allocateUnassigned(allocation); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java index aca3906d185..e9d6154f71a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java @@ -65,7 +65,7 @@ public class ReusePeerRecoverySharedTest { * for replicas. 
*/ assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put(indexSettings) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE))); client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); logger.info("--> indexing docs"); for (int i = 0; i < 1000; i++) { diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index cce4c0d22c4..6cc6def4931 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -40,10 +40,13 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Map; @@ -62,6 +65,12 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; public class GetActionIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + public void testSimpleGet() { assertAcked(prepareCreate("test") .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)) diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index b83af9c0960..eae3e65c406 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -18,6 +18,15 @@ */ package org.elasticsearch.index; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; @@ -33,7 +42,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -51,7 +60,6 @@ import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreConfig; -import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesWarmer; import 
org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -70,14 +78,7 @@ import org.elasticsearch.test.engine.MockEngineFactory; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; +import static java.util.Collections.emptyMap; public class IndexModuleTests extends ESTestCase { private Index index; @@ -108,7 +109,7 @@ public class IndexModuleTests extends ESTestCase { Set scriptEngines = new HashSet<>(); scriptEngines.addAll(Arrays.asList(scriptEngineServices)); ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.emptyList())); - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.emptySet(), new NamedWriteableRegistry()); + IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, emptyMap()); return new NodeServicesProvider(threadPool, indicesQueryCache, null, warmer, bigArrays, client, scriptService, indicesQueriesRegistry, indicesFieldDataCache, circuitBreakerService); } @@ -147,7 +148,7 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterIndexStore() throws IOException { final Index index = new Index("foo"); - final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).put(IndexModule.STORE_TYPE, "foo_store").build(); + final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store").build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); module.addIndexStore("foo_store", FooStore::new); @@ -173,13 +174,11 @@ public class IndexModuleTests extends ESTestCase { IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); Consumer listener = (s) -> {}; - module.addIndexSettingsListener(listener); module.addIndexEventListener(eventListener); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); IndexSettings x = indexService.getIndexSettings(); assertEquals(x.getSettings().getAsMap(), indexSettings.getSettings().getAsMap()); assertEquals(x.getIndex(), index); - assertSame(x.getUpdateListeners().get(0), listener); indexService.getIndexEventListener().beforeIndexDeleted(null); assertTrue(atomicBoolean.get()); indexService.close("simon says", false); @@ -187,28 +186,22 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { - IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); - Consumer listener = (s) -> { - }; - module.addIndexSettingsListener(listener); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX); + IndexModule module = new 
IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment)); + Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, true, Setting.Scope.INDEX); + AtomicBoolean atomicBoolean = new AtomicBoolean(false); + module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); try { - module.addIndexSettingsListener(listener); - fail("already added"); - } catch (IllegalStateException ex) { - - } - - try { - module.addIndexSettingsListener(null); - fail("must not be null"); + module.addSettingsUpdateConsumer(booleanSetting2, atomicBoolean::set); + fail("not registered"); } catch (IllegalArgumentException ex) { } + IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); - IndexSettings x = indexService.getIndexSettings(); - assertEquals(1, x.getUpdateListeners().size()); - assertSame(x.getUpdateListeners().get(0), listener); + assertSame(booleanSetting, indexService.getIndexSettings().getScopedSettings().get(booleanSetting.getKey())); + indexService.close("simon says", false); } @@ -298,7 +291,7 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterCustomQueryCache() throws IOException { Settings indexSettings = Settings.settingsBuilder() - .put(IndexModule.QUERY_CACHE_TYPE, "custom") + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), "custom") .put("path.home", createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); @@ -374,8 +367,6 @@ public class IndexModuleTests extends ESTestCase { } } - - public static final class FooStore extends IndexStore { public FooStore(IndexSettings indexSettings, IndexStoreConfig config) { diff --git a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 4c892849ccd..15f9a3e78b3 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -68,12 +68,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertTrue("shadow replicas for replica shards with shadow settings",IndexService.useShadowEngine(false, shadowSettings)); } - public IndexService newIndexService() { - Settings settings = Settings.builder().put("name", "indexServiceTests").build(); - return createIndex("test", settings); - } - - public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -82,7 +76,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testFilteringAliases() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "dogs", filter(termQuery("animal", "dog"))); @@ -106,7 +100,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testAliasFilters() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); 
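// The IndexModuleTests#testListener hunk above pins down the new registration contract:
// an update consumer may only be subscribed for a Setting<T> that the module's scoped
// settings were built with; anything else fails fast with IllegalArgumentException rather
// than being silently accepted. A minimal sketch of the happy path, mirroring testListener
// (the key "index.example.flag" is illustrative only):
Setting<Boolean> exampleFlag = Setting.boolSetting("index.example.flag", false, true, Setting.Scope.INDEX);
IndexModule exampleModule = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, exampleFlag),
        null, new AnalysisRegistry(null, environment));
AtomicBoolean observed = new AtomicBoolean(false);
exampleModule.addSettingsUpdateConsumer(exampleFlag, observed::set); // accepted: exampleFlag was registered above
// exampleModule.addSettingsUpdateConsumer(someOtherSetting, observed::set) would throw IllegalArgumentException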
add(indexService, "cats", filter(termQuery("animal", "cat"))); @@ -123,7 +117,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testRemovedAliasFilter() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); @@ -137,7 +131,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testUnknownAliasFilter() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); @@ -162,7 +156,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testBaseAsyncTask() throws InterruptedException, IOException { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); AtomicReference latch = new AtomicReference<>(new CountDownLatch(1)); AtomicReference latch2 = new AtomicReference<>(new CountDownLatch(1)); final AtomicInteger count = new AtomicInteger(); @@ -221,13 +215,13 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testRefreshTaskIsUpdated() throws IOException { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexService.AsyncRefreshTask refreshTask = indexService.getRefreshTask(); assertEquals(1000, refreshTask.getInterval().millis()); assertTrue(indexService.getRefreshTask().mustReschedule()); // now disable - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, -1)).build(); + IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).build(); indexService.updateMetaData(metaData); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -235,7 +229,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertFalse(indexService.getRefreshTask().mustReschedule()); // set it to 100ms - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "100ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "100ms")).build(); indexService.updateMetaData(metaData); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -246,7 +240,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(100, refreshTask.getInterval().millis()); // set it to 200ms - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "200ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), 
"200ms")).build(); indexService.updateMetaData(metaData); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -257,7 +251,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(200, refreshTask.getInterval().millis()); // set it to 200ms again - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "200ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).build(); indexService.updateMetaData(metaData); assertSame(refreshTask, indexService.getRefreshTask()); assertTrue(indexService.getRefreshTask().mustReschedule()); @@ -270,7 +264,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testFsyncTaskIsRunning() throws IOException { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexService.AsyncTranslogFSync fsyncTask = indexService.getFsyncTask(); assertNotNull(fsyncTask); assertEquals(5000, fsyncTask.getInterval().millis()); @@ -283,14 +277,14 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testRefreshActuallyWorks() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); ensureGreen("test"); IndexService.AsyncRefreshTask refreshTask = indexService.getRefreshTask(); assertEquals(1000, refreshTask.getInterval().millis()); assertTrue(indexService.getRefreshTask().mustReschedule()); // now disable - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, -1)).build(); + IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).build(); indexService.updateMetaData(metaData); client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}").get(); IndexShard shard = indexService.getShard(0); @@ -299,7 +293,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(0, search.totalHits); } // refresh every millisecond - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "1ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1ms")).build(); indexService.updateMetaData(metaData); assertBusy(() -> { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { @@ -313,8 +307,8 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testAsyncFsyncActuallyWorks() throws Exception { Settings settings = Settings.builder() - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, "10ms") // very often :) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC) + .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "10ms") // very often :) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), 
Translog.Durability.ASYNC) .build(); IndexService indexService = createIndex("test", settings); ensureGreen("test"); @@ -328,7 +322,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testNoFsyncTaskIfDisabled() { Settings settings = Settings.builder() - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, "0ms") // disable + .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "0ms") // disable .build(); IndexService indexService = createIndex("test", settings); assertNull(indexService.getFsyncTask()); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 316badf376b..56179d5390b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -20,16 +20,22 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; +import java.util.function.Function; public class IndexSettingsTests extends ESTestCase { @@ -38,13 +44,14 @@ public class IndexSettingsTests extends ESTestCase { Version version = VersionUtils.getPreviousVersion(); Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); - Consumer settingsConsumer = (s) -> integer.set(s.getAsInt("index.test.setting.int", -1)); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, Setting.Scope.INDEX); IndexMetaData metaData = newIndexMeta("index", theSettings); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.singleton(settingsConsumer)); + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); + assertEquals(version, settings.getIndexVersionCreated()); assertEquals("0xdeadbeef", settings.getUUID()); - assertEquals(1, settings.getUpdateListeners().size()); assertFalse(settings.updateIndexMetaData(metaData)); assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); assertEquals(0, integer.get()); @@ -58,11 +65,12 @@ public class IndexSettingsTests extends ESTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); final StringBuilder builder = new StringBuilder(); - Consumer settingsConsumer = (s) -> { - integer.set(s.getAsInt("index.test.setting.int", -1)); - builder.append(s.get("index.not.updated", "")); - }; - IndexSettings settings = new IndexSettings(newIndexMeta("index", 
theSettings), Settings.EMPTY, Collections.singleton(settingsConsumer)); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, Setting.Scope.INDEX); + Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), true, Setting.Scope.INDEX); + + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting, notUpdated); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); + settings.getScopedSettings().addSettingsUpdateConsumer(notUpdated, builder::append); assertEquals(0, integer.get()); assertEquals("", builder.toString()); IndexMetaData newMetaData = newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings()).put("index.test.setting.int", 42).build()); @@ -73,30 +81,14 @@ public class IndexSettingsTests extends ESTestCase { integer.set(0); assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings()).put("index.not.updated", "boom").build()))); assertEquals("boom", builder.toString()); - assertEquals(42, integer.get()); + assertEquals("not updated - we preserve the old settings", 0, integer.get()); } - public void testListenerCanThrowException() { - Version version = VersionUtils.getPreviousVersion(); - Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); - final AtomicInteger integer = new AtomicInteger(0); - Consumer settingsConsumer = (s) -> integer.set(s.getAsInt("index.test.setting.int", -1)); - Consumer exceptionConsumer = (s) -> {throw new RuntimeException("boom");}; - List> list = new ArrayList<>(); - list.add(settingsConsumer); - list.add(exceptionConsumer); - Collections.shuffle(list, random()); - IndexSettings settings = new IndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, list); - assertEquals(0, integer.get()); - assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 42).build()))); - assertEquals(42, integer.get()); - } - public void testSettingsConsistency() { Version version = VersionUtils.getPreviousVersion(); IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); assertEquals(version, settings.getIndexVersionCreated()); assertEquals("_na_", settings.getUUID()); try { @@ -107,7 +99,7 @@ public class IndexSettingsTests extends ESTestCase { } metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build()); - settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + settings = new IndexSettings(metaData, Settings.EMPTY); try { settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("index.test.setting.int", 42).build())); fail("uuid missing/change"); @@ -117,24 +109,31 @@ public class IndexSettingsTests extends ESTestCase { assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); } + public IndexSettings newIndexSettings(IndexMetaData metaData, Settings nodeSettings, 
Setting... settings) { + Set> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + if (settings.length > 0) { + settingSet.addAll(Arrays.asList(settings)); + } + return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet)); + } + public void testNodeSettingsAreContained() { final int numShards = randomIntBetween(1, 10); final int numReplicas = randomIntBetween(0, 10); Settings theSettings = Settings.settingsBuilder(). - put("index.foo.bar", 42) + put("index.foo.bar", 0) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build(); - Settings nodeSettings = Settings.settingsBuilder().put("node.foo.bar", 43).build(); + Settings nodeSettings = Settings.settingsBuilder().put("index.foo.bar", 43).build(); final AtomicInteger indexValue = new AtomicInteger(0); - final AtomicInteger nodeValue = new AtomicInteger(0); - Consumer settingsConsumer = (s) -> {indexValue.set(s.getAsInt("index.foo.bar", -1)); nodeValue.set(s.getAsInt("node.foo.bar", -1));}; - IndexSettings settings = new IndexSettings(newIndexMeta("index", theSettings), nodeSettings, Collections.singleton(settingsConsumer)); + Setting integerSetting = Setting.intSetting("index.foo.bar", -1, true, Setting.Scope.INDEX); + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), nodeSettings, integerSetting); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, indexValue::set); assertEquals(numReplicas, settings.getNumberOfReplicas()); assertEquals(numShards, settings.getNumberOfShards()); assertEquals(0, indexValue.get()); - assertEquals(0, nodeValue.get()); assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.settingsBuilder(). 
put("index.foo.bar", 42) @@ -142,13 +141,16 @@ public class IndexSettingsTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build()))); assertEquals(42, indexValue.get()); - assertEquals(43, nodeValue.get()); assertSame(nodeSettings, settings.getNodeSettings()); + assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas + 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build()))); + assertEquals(43, indexValue.get()); } - private IndexMetaData newIndexMeta(String name, Settings indexSettings) { + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) @@ -162,19 +164,124 @@ public class IndexSettingsTests extends ESTestCase { public void testUpdateDurability() { IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "async") + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async") .build()); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); assertEquals(Translog.Durability.ASYNC, settings.getTranslogDurability()); - settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "request").build())); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "request").build())); assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); metaData = newIndexMeta("index", Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); - settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + settings = new IndexSettings(metaData, Settings.EMPTY); assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); // test default } + public void testIsWarmerEnabled() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertFalse(settings.isWarmerEnabled()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(), "true").build())); + assertTrue(settings.isWarmerEnabled()); + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertTrue(settings.isWarmerEnabled()); + } + public void testRefreshInterval() { + String refreshInterval = getRandomTimeString(); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(TimeValue.parseTimeValue(refreshInterval, new 
TimeValue(1, TimeUnit.DAYS), IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval()); + String newRefreshInterval = getRandomTimeString(); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), newRefreshInterval).build())); + assertEquals(TimeValue.parseTimeValue(newRefreshInterval, new TimeValue(1, TimeUnit.DAYS), IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval()); + } + + private String getRandomTimeString() { + int refreshIntervalInt= randomFrom(-1, Math.abs(randomInt())); + String refreshInterval = Integer.toString(refreshIntervalInt); + if (refreshIntervalInt >= 0) { + refreshInterval += randomFrom("s", "ms", "h"); + } + return refreshInterval; + } + + public void testMaxResultWindow() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), 15) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(15, settings.getMaxResultWindow()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), 42).build())); + assertEquals(42, settings.getMaxResultWindow()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow()); + } + + public void testGCDeletesSetting() { + TimeValue gcDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeleteSetting.getStringRep()) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(TimeValue.parseTimeValue(gcDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS), IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis()); + TimeValue newGCDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), newGCDeleteSetting.getStringRep()).build())); + assertEquals(TimeValue.parseTimeValue(newGCDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS), IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), randomBoolean() ? 
-1 : new TimeValue(-1, TimeUnit.MILLISECONDS)).build())); + assertEquals(-1, settings.getGcDeletesInMillis()); + } + + public void testIsTTLPurgeDisabled() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertFalse(settings.isTTLPurgeDisabled()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(), "true").build())); + assertTrue(settings.isTTLPurgeDisabled()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertFalse("reset to default", settings.isTTLPurgeDisabled()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertFalse(settings.isTTLPurgeDisabled()); + } + + public void testTranslogFlushSizeThreshold() { + ByteSizeValue translogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt())); + ByteSizeValue actualValue = ByteSizeValue.parseBytesSizeValue(translogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey()); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), translogFlushThresholdSize.toString()) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(actualValue, settings.getFlushThresholdSize()); + ByteSizeValue newTranslogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt())); + ByteSizeValue actualNewTranslogFlushThresholdSize = ByteSizeValue.parseBytesSizeValue(newTranslogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), newTranslogFlushThresholdSize.toString()).build())); + assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 69f2e0a6d4a..7012ebbc5a8 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -181,7 +181,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { Settings idxSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString()) .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index e39c0a805fe..8fd6876b4b2 100644 --- 
a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -22,7 +22,11 @@ package org.elasticsearch.index; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.IntField; import org.apache.lucene.document.StringField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexingSlowLog.SlowLogParsedDocumentPrinter; import org.elasticsearch.index.mapper.ParsedDocument; @@ -51,4 +55,155 @@ public class IndexingSlowLogTests extends ESTestCase { p = new SlowLogParsedDocumentPrinter(pd, 10, true, 3); assertThat(p.toString(), containsString("source[{\"f]")); } + + public void testReformatSetting() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + IndexingSlowLog log = new IndexingSlowLog(settings); + assertFalse(log.isReformat()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "true").build())); + assertTrue(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "false").build())); + assertFalse(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertTrue(log.isReformat()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new IndexingSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "NOT A BOOLEAN").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse value [NOT A BOOLEAN] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]"); + } + assertTrue(log.isReformat()); + } + + public void testLevelSetting() { + SlowLogLevel level = randomFrom(SlowLogLevel.values()); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + IndexingSlowLog log = new IndexingSlowLog(settings); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build())); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + + 
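// For the assertions in these tests to hold, IndexingSlowLog presumably subscribes itself to
// the scoped settings when it is constructed, one consumer per reformat/level/threshold
// setting, so that updateIndexMetaData mutates the live instance instead of rebuilding it.
// A hedged sketch of that wiring style; this is not the actual constructor body (which is
// outside this excerpt) and setReformat is a hypothetical setter:
indexSettings.getScopedSettings().addSettingsUpdateConsumer(
        IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat);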
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new IndexingSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), "NOT A LEVEL").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "No enum constant org.elasticsearch.index.SlowLogLevel.NOT A LEVEL"); + } + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + } + + public void testSetLevels() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "100ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "200ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "300ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "400ms") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + IndexingSlowLog log = new IndexingSlowLog(settings); + assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getIndexWarnThreshold()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "120ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "220ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "320ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "420ms").build())); + + + assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getIndexWarnThreshold()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings.updateIndexMetaData(metaData); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexWarnThreshold()); + + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new IndexingSlowLog(settings); + + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexTraceThreshold()); + 
assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexWarnThreshold()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + } + + private IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } } diff --git a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java index afc034844ef..1e3e51c0838 100644 --- a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.TieredMergePolicy; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -31,106 +33,106 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; +import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; import static org.hamcrest.Matchers.equalTo; public class MergePolicySettingsTests extends ESTestCase { protected final ShardId 
shardId = new ShardId(new Index("index"), 1); public void testCompoundFileSettings() throws IOException { - assertThat(new MergePolicyConfig(logger, EMPTY_SETTINGS).getMergePolicy().getNoCFSRatio(), equalTo(0.1)); - assertThat(new MergePolicyConfig(logger, build(true)).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build(0.5)).getMergePolicy().getNoCFSRatio(), equalTo(0.5)); - assertThat(new MergePolicyConfig(logger, build(1.0)).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build("true")).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build("True")).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build("False")).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build("false")).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build(false)).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build(0)).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build(0.0)).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(Settings.EMPTY)).getMergePolicy().getNoCFSRatio(), equalTo(0.1)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(true))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(0.5))).getMergePolicy().getNoCFSRatio(), equalTo(0.5)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(1.0))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("true"))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("True"))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("False"))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("false"))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(false))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(0))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(0.0))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + } + + private static IndexSettings indexSettings(Settings settings) { + return new IndexSettings(newIndexMeta("test", settings), Settings.EMPTY); } public void testNoMerges() { - MergePolicyConfig mp = new MergePolicyConfig(logger, Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false).build()); + MergePolicyConfig mp = new MergePolicyConfig(logger, indexSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false).build())); assertTrue(mp.getMergePolicy() instanceof NoMergePolicy); } public void testUpdateSettings() throws IOException { - { - MergePolicyConfig mp = new MergePolicyConfig(logger, EMPTY_SETTINGS); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(0.1)); - - mp.onRefreshSettings(build(1.0)); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(1.0)); - - mp.onRefreshSettings(build(0.1)); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(0.1)); - - 
mp.onRefreshSettings(build(0.0)); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(0.0)); - } - - + IndexSettings indexSettings = indexSettings(EMPTY_SETTINGS); + assertThat(indexSettings.getMergePolicy().getNoCFSRatio(), equalTo(0.1)); + indexSettings = indexSettings(build(0.9)); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.9)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build(0.1))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.1)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build(0.0))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.0)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build("true"))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(1.0)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build("false"))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.0)); } public void testTieredMergePolicySettingsUpdate() throws IOException { - MergePolicyConfig mp = new MergePolicyConfig(logger, EMPTY_SETTINGS); - assertThat(mp.getMergePolicy().getNoCFSRatio(), equalTo(0.1)); + IndexSettings indexSettings = indexSettings(Settings.EMPTY); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING.getKey(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getFloorSegmentMB(), MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mbFrac(), 0); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT, new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB)).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB).mbFrac(), 0.001); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mbFrac(), 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING.getKey(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB)).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB).mbFrac(), 0.001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); - 
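// The removed lines show the old flow: MergePolicyConfig consumed raw Settings through its
// onRefreshSettings callback. The replacement routes every change through
// IndexSettings.updateIndexMetaData and re-reads the policy from IndexSettings; dropping a
// key from the metadata restores its default, as the "see if defaults are restored" block
// below asserts. A minimal sketch of the new flow, reusing this file's indexSettings(...)
// helper and the statically imported newIndexMeta(...):
IndexSettings exampleSettings = indexSettings(Settings.EMPTY);
exampleSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder()
        .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(),
                MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build()));
assertEquals(MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1,
        ((TieredMergePolicy) exampleSettings.getMergePolicy()).getSegmentsPerTier(), 0);
exampleSettings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); // drop the key
assertEquals(MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER,
        ((TieredMergePolicy) exampleSettings.getMergePolicy()).getSegmentsPerTier(), 0); // default restored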
mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE-1); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT-1); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING.getKey(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT-1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergedSegmentMB(), MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.mbFrac(), 0.0001); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1)).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.mbFrac(), 0.0001); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING.getKey(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1)).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", 
Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING.getKey(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); - mp.onRefreshSettings(EMPTY_SETTINGS); // update without the settings and see if we stick to the values - - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB).mbFrac(), 0.001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE-1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT-1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", EMPTY_SETTINGS)); // see if defaults are restored + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb(), ByteSizeUnit.MB).mbFrac(), 0.00); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); + assertEquals(((TieredMergePolicy) 
indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); } public Settings build(String value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } public Settings build(double value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } public Settings build(int value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } public Settings build(boolean value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); }
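The hunks above establish the new test idiom for dynamic index settings: rather than invoking the old onRefreshSettings callback on MergePolicyConfig, a test rebuilds IndexMetaData and applies it through IndexSettings.updateIndexMetaData, which fans the change out to whatever consumers the setting has registered. A condensed sketch of one round trip, using only names visible in this diff (newIndexMeta is the helper the slow-log tests below introduce):

    // start from defaults: noCFSRatio is 0.1 when index.compound_format is unset
    IndexSettings indexSettings = new IndexSettings(newIndexMeta("index", Settings.EMPTY), Settings.EMPTY);
    assertEquals(0.1, indexSettings.getMergePolicy().getNoCFSRatio(), 0.0);

    // a dynamic update is modeled as freshly built IndexMetaData, not a callback
    indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder()
            .put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), 0.5).build()));
    assertEquals(0.5, indexSettings.getMergePolicy().getNoCFSRatio(), 0.0);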
diff --git a/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java new file mode 100644 index 00000000000..15bf60baab4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -0,0 +1,253 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + + +public class SearchSlowLogTests extends ESTestCase { + + public void testReformatSetting() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertFalse(log.isReformat()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), "true").build())); + assertTrue(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), "false").build())); + assertFalse(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertTrue(log.isReformat()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), "NOT A BOOLEAN").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse value [NOT A BOOLEAN] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]"); + } + assertTrue(log.isReformat()); + } + + public void testLevelSetting() { + SlowLogLevel level = randomFrom(SlowLogLevel.values()); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level).build())); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), "NOT A
LEVEL").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "No enum constant org.elasticsearch.index.SlowLogLevel.NOT A LEVEL"); + } + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + } + + public void testSetQueryLevels() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "100ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "200ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "300ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "400ms") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getQueryWarnThreshold()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "120ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "220ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "320ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "420ms").build())); + + + assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getQueryWarnThreshold()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings.updateIndexMetaData(metaData); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryWarnThreshold()); + + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryWarnThreshold()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", 
Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + } + + public void testSetFetchLevels() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "100ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "200ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "300ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "400ms") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getFetchWarnThreshold()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "120ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "220ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "320ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "420ms").build())); + + + assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getFetchWarnThreshold()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings.updateIndexMetaData(metaData); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), 
log.getFetchWarnThreshold()); + + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchWarnThreshold()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + } + + + private IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } +}
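Every threshold test in this new file follows the same lifecycle: seed IndexMetaData with the slow-log keys, wrap it in IndexSettings, hand that to the SearchSlowLog constructor, then push fresh metadata and assert that the thresholds track it. Condensed to a single round trip (all names are taken from the tests above; -1 nanos is the sentinel for a disabled threshold):

    IndexSettings settings = new IndexSettings(newIndexMeta("index", Settings.builder()
            .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "400ms")
            .build()), Settings.EMPTY);
    SearchSlowLog log = new SearchSlowLog(settings);
    assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getQueryWarnThreshold());

    // removing the key falls back to the default of -1, i.e. the threshold is switched off
    settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
    assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryWarnThreshold());

A malformed value is rejected inside updateIndexMetaData with an IllegalArgumentException, and the previously applied thresholds stay in effect, which is exactly what the try/fail/catch blocks above pin down.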
diff --git a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index f62d44df43f..7dbff244fcc 100644 --- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -18,18 +18,17 @@ */ package org.elasticsearch.index; -import org.elasticsearch.cluster.ClusterModule; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import java.util.Collection; import java.util.Collections; -import java.util.function.Consumer; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -44,7 +43,7 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsListenerPlugin extends Plugin { private final SettingsTestingService service = new SettingsTestingService(); - + private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, true, Setting.Scope.INDEX); /** * The name of the plugin. */ @@ -61,15 +60,15 @@ public class SettingsListenerIT extends ESIntegTestCase { return "Settings Listenern Plugin"; } - public void onModule(ClusterModule clusterModule) { - clusterModule.registerIndexDynamicSetting("index.test.new.setting", Validator.INTEGER); + public void onModule(SettingsModule settingsModule) { + settingsModule.registerSetting(SettingsTestingService.VALUE); } @Override public void onIndexModule(IndexModule module) { if (module.getIndex().getName().equals("test")) { // only for the test index - module.addIndexSettingsListener(service); - service.accept(module.getSettings()); + module.addSettingsUpdateConsumer(SettingsTestingService.VALUE, service::setValue); + service.setValue(SettingsTestingService.VALUE.get(module.getSettings())); } } @@ -92,13 +91,14 @@ public class SettingsListenerIT extends ESIntegTestCase { } } - public static class SettingsTestingService implements Consumer { + public static class SettingsTestingService { public volatile int value; + public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, Setting.Scope.INDEX); - @Override - public void accept(Settings settings) { - value = settings.getAsInt("index.test.new.setting", -1); + public void setValue(int value) { + this.value = value; } + } public void testListener() { @@ -130,5 +130,13 @@ public class SettingsListenerIT extends ESIntegTestCase { for (SettingsTestingService instance : internalCluster().getInstances(SettingsTestingService.class)) { assertEquals(42, instance.value); } + + try { + client().admin().indices().prepareUpdateSettings("other").setSettings(Settings.builder() + .put("index.test.new.setting", -5)).get(); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [-5] for setting [index.test.new.setting] must be >= -1", ex.getMessage()); + } } }
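The SettingsListenerIT changes show the full registration flow for the new typed settings in one place: declare a Setting once (key, default, minimum, dynamic flag, index scope), make it known through SettingsModule.registerSetting, and subscribe a consumer through IndexModule.addSettingsUpdateConsumer. Restated compactly as a sketch, with the generic parameter written out:

    // declared once; intSetting(key, default, minValue, dynamic, scope) as used above
    public static final Setting<Integer> VALUE =
            Setting.intSetting("index.test.new.setting", -1, -1, true, Setting.Scope.INDEX);

    public void onModule(SettingsModule settingsModule) {
        settingsModule.registerSetting(VALUE); // registration is what enables up-front validation
    }

    @Override
    public void onIndexModule(IndexModule module) {
        module.addSettingsUpdateConsumer(VALUE, service::setValue); // runs on every dynamic update
        service.setValue(VALUE.get(module.getSettings()));          // seed with the initial value
    }

Because the minimum is baked into the Setting itself, the out-of-range update at the end of testListener now fails fast with "must be >= -1" instead of ever reaching a listener.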
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 1a88fcbc0c1..f4792f0bc37 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -27,9 +27,12 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Collection; import java.util.Locale; import static org.elasticsearch.test.VersionUtils.randomVersion; @@ -40,6 +43,12 @@ import static org.hamcrest.Matchers.is; * */ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testThatDefaultAndStandardAnalyzerAreTheSameInstance() { Analyzer currentStandardAnalyzer = PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT); Analyzer currentDefaultAnalyzer = PreBuiltAnalyzers.DEFAULT.getAnalyzer(Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 03a87dec232..7cfe52d1c09 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -60,6 +60,7 @@ import static org.hamcrest.Matchers.instanceOf; @SuppressCodecs("*") // we test against default codec so never get a random one here! public class CodecTests extends ESTestCase { + public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java index a873e61b1af..8a4da8e3ca5 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java @@ -43,9 +43,9 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { long gcDeletes = random().nextLong() & (Long.MAX_VALUE >> 11); Settings build = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, gcDeletes, TimeUnit.MILLISECONDS) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeletes, TimeUnit.MILLISECONDS) .build(); - assertEquals(gcDeletes, build.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING, null).millis()); + assertEquals(gcDeletes, build.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), null).millis()); client().admin().indices().prepareUpdateSettings("foo").setSettings(build).get(); LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig(); @@ -58,7 +58,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { } Settings settings = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, 1000, TimeUnit.MILLISECONDS) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS) .build(); client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get(); assertEquals(engine.getGcDeletesInMillis(), 1000); @@ -66,7 +66,7 @@ settings = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, "0ms") + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "0ms") .build(); client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get(); @@ -74,7 +74,7 @@ assertTrue(engine.config().isEnableGcDeletes()); settings = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, 1000, TimeUnit.MILLISECONDS) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS) .build(); client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get(); assertEquals(engine.getGcDeletesInMillis(), 1000);
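The InternalEngineSettingsTests hunks are the mechanical half of the migration: wherever a raw constant used to double as the settings key, the typed Setting now supplies its key through getKey(), while the update itself still travels the ordinary update-settings API to the live engine. In outline:

    // before: the constant itself was the key string
    //   Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING, "0ms")
    // after: the typed Setting names its key explicitly
    Settings settings = Settings.builder()
            .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS)
            .build();
    client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get();
    assertEquals(1000, engine.getGcDeletesInMillis()); // the running engine picks up the new value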
diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 56affa3db23..2b72018aa8e 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -166,7 +166,7 @@ public class InternalEngineTests extends ESTestCase { } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() .put(IndexSettings.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us - .put(EngineConfig.INDEX_CODEC_SETTING, codecName) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); // TODO randomize more settings threadPool = new ThreadPool(getClass().getName()); @@ -1600,8 +1600,7 @@ public class InternalEngineTests extends ESTestCase { } // now it should be OK. IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetaData(), - Settings.builder().put(defaultSettings.getSettings()).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build(), - Collections.emptyList()); + Settings.builder().put(defaultSettings.getSettings()).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build()); engine = createEngine(indexSettings, store, primaryTranslogDir, newMergePolicy()); } diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 2eca00dc5a6..71ad0e16909 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -118,7 +118,7 @@ public class ShadowEngineTests extends ESTestCase { } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() .put(IndexSettings.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us - .put(EngineConfig.INDEX_CODEC_SETTING, codecName) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); // TODO randomize more settings diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 70e3b66553c..07ae1e70a48 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -49,12 +49,15 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Collection; import static org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import static org.hamcrest.Matchers.equalTo; @@ -84,6 +87,11 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { return getForField(type, fieldName, hasDocValues()); } + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public > IFD
getForField(FieldDataType type, String fieldName, boolean docValues) { final MappedFieldType fieldType; final BuilderContext context = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java index 13f7f74e37b..35a74ea3849 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java @@ -106,7 +106,7 @@ public class FieldDataCacheTests extends ESTestCase { .numberOfReplicas(0) .creationDate(System.currentTimeMillis()) .build(); - return new IndexSettings(indexMetaData, settings, Collections.emptyList()); + return new IndexSettings(indexMetaData, settings); } private class DummyAccountingFieldDataCache implements IndexFieldDataCache { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index eefe8c89183..0187ababfe0 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -60,10 +61,10 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { @Before public void before() throws Exception { mapperService.merge( - childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true, false + childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), MapperService.MergeReason.MAPPING_UPDATE, false ); mapperService.merge( - grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true, false + grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), MapperService.MergeReason.MAPPING_UPDATE, false ); Document d = new Document(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index f6cfcec041a..f8859efa025 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -67,7 +67,7 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { public void setUp() throws Exception { super.setUp(); settings = Settings.builder() - .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false) + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false) .build(); clusterService = new TestClusterService(THREAD_POOL); transport = new LocalTransport(settings, THREAD_POOL, 
Version.CURRENT, new NamedWriteableRegistry()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 22a10ab8229..1a4fb0d9c4c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -482,7 +482,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject().endObject(); - indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), true, false); + indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder mappings2 = jsonBuilder().startObject() .startObject("type2") .startObject("properties") @@ -491,7 +491,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject(); - indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), true, false); + indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder json = XContentFactory.jsonBuilder().startObject() .field("field", "foo") @@ -502,7 +502,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { ParsedDocument parsed = mapper.parse(source); assertNotNull(parsed.dynamicMappingsUpdate()); - indexService.mapperService().merge("type1", new CompressedXContent(parsed.dynamicMappingsUpdate().toString()), false, false); + indexService.mapperService().merge("type1", new CompressedXContent(parsed.dynamicMappingsUpdate().toString()), MapperService.MergeReason.MAPPING_UPDATE, false); mapper = indexService.mapperService().documentMapper("type1"); assertNotNull(mapper.mappers().getMapper("field.raw")); parsed = mapper.parse(source); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 94445d3aadb..83eeaf36ffd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -83,15 +83,15 @@ public class MapperServiceTests extends ESSingleNodeTestCase { MapperService mapperService = indexService1.mapperService(); assertEquals(Collections.emptySet(), mapperService.types()); - mapperService.merge("type1", new CompressedXContent("{\"type1\":{}}"), true, false); + mapperService.merge("type1", new CompressedXContent("{\"type1\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); assertNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); assertEquals(Collections.singleton("type1"), mapperService.types()); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent("{\"_default_\":{}}"), true, false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent("{\"_default_\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); assertEquals(Collections.singleton("type1"), mapperService.types()); - mapperService.merge("type2", new CompressedXContent("{\"type2\":{}}"), true, false); + mapperService.merge("type2", new CompressedXContent("{\"type2\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); 
assertNotNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); assertEquals(new HashSet<>(Arrays.asList("type1", "type2")), mapperService.types()); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 841249510d1..642e35eef88 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -43,14 +43,16 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matchers; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -68,6 +70,11 @@ import static org.hamcrest.Matchers.nullValue; public class SimpleAllMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testSimpleAllMappers() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json"); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java index b5a54ce92bd..0a0f746ccd5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java @@ -26,7 +26,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.equalTo; @@ -34,6 +38,11 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testBackCompatCustomBoostValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("s_field").field("type", "string").endObject() @@ -68,4 +77,4 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); } -} \ No newline at end of file +} 
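Two edits repeat through the remaining mapper tests. First, MapperService.merge now takes a MapperService.MergeReason instead of the boolean it previously carried in that position, so each call site states that it is performing a mapping update. Second, single-node tests that pin internal index settings such as IndexMetaData.SETTING_VERSION_CREATED override getPlugins() to load InternalSettingsPlugin, presumably so those settings remain settable under the stricter validation this change introduces. The shape of both, with generics written out:

    // the MergeReason enum replaces the boolean formerly passed in this position
    DocumentMapper mapper = mapperService.merge("type",
            new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false);

    // opting a single-node test into internal settings like index.version.created
    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(InternalSettingsPlugin.class);
    }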
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java index bb5aecd9ec9..6b83ae51385 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java @@ -29,7 +29,11 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.closeTo; @@ -39,6 +43,11 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testBackCompatFieldLevelBoost() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("str_field").field("type", "string").endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java index fa7bbf8f249..ea66eaa4c5c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java @@ -26,13 +26,22 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; public class CompoundTypesTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); public void testBackCompatStringType() throws Exception { @@ -74,4 +83,4 @@ public class CompoundTypesTests extends ESSingleNodeTestCase { assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d)); assertThat(doc.rootDoc().get("field2"), equalTo("value2")); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java index daf54d501d7..d1de843e38f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java @@ -312,11 +312,11 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - 
DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), true, false); + DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); - DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), false, false); + DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields()); assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java index 3aa04ba0f01..2175f2ce3e7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; @@ -125,7 +126,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject() .endObject().endObject().string(); - DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); + DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); BytesReference source = XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index a746717b73a..e8fe39e9bbb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -52,7 +52,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject() .startObject("person") @@ -63,7 +63,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), false, false); + DocumentMapper stage2 = mapperService.merge("person", new 
CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); // previous mapper has not been modified assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index ba05ea81054..c42924132ff 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -32,9 +32,12 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -46,6 +49,11 @@ import static org.hamcrest.Matchers.notNullValue; */ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testExternalValues() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 758e5a38294..2ea19b02450 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -35,11 +35,14 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.geo.RandomGeoGenerator; +import java.util.Collection; import java.util.List; import java.util.Map; @@ -52,6 +55,12 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testLatLonValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() @@ -662,12 +671,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", 
true).endObject().endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test", settings).mapperService(); - DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) .field("geohash", false).endObject().endObject().endObject().endObject().string(); try { - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]")); @@ -679,7 +688,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); } public void testGeoHashSearch() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index af03d3accb1..0ad4dbd87ba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -376,13 +376,13 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw") .endObject().endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree") .field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26) .field("orientation", "cw").endObject().endObject().endObject().endObject().string(); try { - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]")); @@ -408,7 +408,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", 
"geo_shape").field("precision", "1m") .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string(); - docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 9e0d7b596eb..c2cbee42cf2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -29,9 +29,13 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import java.util.Collection; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -42,6 +46,12 @@ import static org.hamcrest.Matchers.nullValue; * */ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testLatLonValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index 2e28f60e7d9..e23ea15b64f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -148,11 +148,11 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); - DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertFalse(mapperDisabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), false, false); + mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); 
assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java index 309fa274919..77e7b46537e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java @@ -25,10 +25,19 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; public class TypeFieldMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testDocValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java index 82a8918c66b..5886b3e486f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java @@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.nullValue; * */ public class SimpleIpMappingTests extends ESSingleNodeTestCase { + public void testSimpleMapping() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject()
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 80f7942bbcc..4614909568c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.io.IOException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -135,8 +136,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject() .endObject().endObject().string(); - DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), true, false); - DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), false, false); + DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(((NamedAnalyzer)
existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); @@ -146,7 +147,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { public void testConcurrentMergeTest() throws Throwable { final MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true, false); + mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); final DocumentMapper documentMapper = mapperService.documentMapper("test"); DocumentFieldMappers dfm = documentMapper.mappers(); @@ -172,7 +173,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { Mapping update = doc.dynamicMappingsUpdate(); assert update != null; lastIntroducedFieldName.set(fieldName); - mapperService.merge("test", new CompressedXContent(update.toString()), false, false); + mapperService.merge("test", new CompressedXContent(update.toString()), MapperService.MergeReason.MAPPING_UPDATE, false); } } catch (Throwable t) { error.set(t); @@ -203,4 +204,28 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { throw error.get(); } } + + public void testDoNotRepeatOriginalMapping() throws IOException { + CompressedXContent mapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .endObject().endObject().bytes()); + MapperService mapperService = createIndex("test").mapperService(); + mapperService.merge("type", mapping, MapperService.MergeReason.MAPPING_UPDATE, false); + + CompressedXContent update = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", "string") + .endObject() + .endObject() + .endObject().endObject().bytes()); + DocumentMapper mapper = mapperService.merge("type", update, MapperService.MergeReason.MAPPING_UPDATE, false); + + assertNotNull(mapper.mappers().getMapper("foo")); + assertFalse(mapper.sourceMapper().enabled()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 651b8c45d55..e08d610d3f2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -42,7 +42,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false); + DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -55,7 +55,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - docMapper = mapperService.merge("person", 
new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -72,7 +72,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -83,7 +83,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -98,7 +98,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false); + DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -112,7 +112,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -129,7 +129,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -142,7 +142,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json"); try { - mapperService.merge("person", new CompressedXContent(mapping), false, false); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); 
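The mechanical change repeated across all of these mapper tests is the new merge signature: the leading boolean flag is gone, and callers pass an explicit MapperService.MergeReason. A sketch of the migration, not part of the patch; the mapping source below is a trivial stand-in:

    MapperService mapperService = createIndex("test").mapperService();
    CompressedXContent mappingSource = new CompressedXContent("{\"type\":{}}");
    // old style: mapperService.merge("type", mappingSource, true, false);
    // new style: the caller states why the merge happens
    DocumentMapper mapper = mapperService.merge("type", mappingSource,
            MapperService.MergeReason.MAPPING_UPDATE, false);
    // MAPPING_RECOVERY replays an already-accepted mapping and skips
    // update-only checks such as the nested fields limit exercised in
    // NestedMappingTests below.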
} catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [name] has different [index] values"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java index 6debfa05ee9..5c846b09ab9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java @@ -20,14 +20,22 @@ package org.elasticsearch.index.mapper.nested; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper.Dynamic; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.function.Function; + +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -340,4 +348,58 @@ public class NestedMappingTests extends ESSingleNodeTestCase { assertThat(doc.docs().get(1).get("field"), nullValue()); assertThat(doc.docs().get(2).get("field"), equalTo("value")); } -} \ No newline at end of file + + public void testLimitOfNestedFieldsPerIndex() throws Exception { + Function<String, String> mapping = type -> { + try { + return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + .startObject("nested1").field("type", "nested").startObject("properties") + .startObject("nested2").field("type", "nested") + .endObject().endObject() + .endObject().endObject().endObject().string(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; + + // default limit allows at least two nested fields + createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false); + + // explicitly setting limit to 0 prevents nested fields + try { + createIndex("test2", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build()) + .mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded")); + } + + // setting limit to 1 with 2 nested fields fails + try { + createIndex("test3", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build()) + .mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded")); + } + + MapperService mapperService = createIndex("test4", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 2) + .build()).mapperService(); + mapperService.merge("type1", new CompressedXContent(mapping.apply("type1")), MergeReason.MAPPING_UPDATE, false); + // merging same fields, but different type is ok + mapperService.merge("type2", new CompressedXContent(mapping.apply("type2")), MergeReason.MAPPING_UPDATE, false); + // adding new fields from different type is not ok + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3") + .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().string(); + try { + mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded")); + } + + // do not check nested fields limit if mapping is not updated + createIndex("test5", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build()) + .mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_RECOVERY, false); + } +}
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index 624978bf7d0..e68817e9ea0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -41,10 +41,13 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.string.SimpleStringMappingTests; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -56,6 +59,12 @@ import static org.hamcrest.Matchers.nullValue; /** */ public class SimpleNumericTests extends ESSingleNodeTestCase { + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testNumericDetectionEnabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .field("numeric_detection", true)
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 35b127b6283..f82dcb6eecc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -33,10 +33,13 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; +import java.util.Collection; import java.util.Map; import static org.hamcrest.Matchers.containsString; @@ -44,6 +47,11 @@ import static org.hamcrest.Matchers.equalTo; public class DefaultSourceMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testNoFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").endObject() @@ -167,7 +175,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").getDocumentMapper(); assertThat(mapper.type(), equalTo("my_type")); @@ -180,12 +188,12 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, false); String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() .endObject().endObject().string(); - mapperService.merge("my_type", new CompressedXContent(mapping), true, false); + mapperService.merge("my_type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper mapper = mapperService.documentMapper("my_type"); assertThat(mapper.type(), equalTo("my_type"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 7bd4d9a78c3..d32dcad5434 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -480,7 +480,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = indexService.mapperService(); - DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); + DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -494,7 +494,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() .endObject().endObject().endObject().endObject().string(); - defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -509,7 +509,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() .endObject().endObject().endObject().endObject().string(); try { - mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); + mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("different [omit_norms]"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 51ef9ff0024..ed58bb63b65 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -36,17 +36,20 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Arrays; +import java.util.Collection; + import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.elasticsearch.test.VersionUtils.randomVersionBetween; @@ -62,6 +65,11 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TimestampMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -115,12 +123,12 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("_timestamp").field("enabled", true).endObject() .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false).endObject() .endObject().endObject().string(); -
DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true)); assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); @@ -366,9 +374,9 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { } void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { - mapperService.merge("type", new CompressedXContent(mapping1), true, false); + mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); try { - mapperService.merge("type", new CompressedXContent(mapping2), false, false); + mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); assertNull(conflict); } catch (IllegalArgumentException e) { assertNotNull(conflict); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index fa27e9bcfb7..335d3a06e32 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -22,10 +22,6 @@ package org.elasticsearch.index.mapper.ttl; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -99,8 +95,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), true, false); - DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), false, false); + DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false)); assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true)); @@ -122,8 +118,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); - DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper updatedMapper = 
mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true)); @@ -133,10 +129,10 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String mappingWithTtl = getMappingWithTtlEnabled().string(); String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); try { - mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), false, false); + mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -174,20 +170,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), randomBoolean(), false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); } public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), randomBoolean(), false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); } public void testMergeWithOnlyDefaultSet() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -198,7 +194,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = 
getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); }
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index a53295d7fea..4e65c4a5ab7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -30,9 +30,12 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Collection; import java.util.LinkedHashMap; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; @@ -41,6 +44,11 @@ import static org.hamcrest.CoreMatchers.equalTo; public class UpdateMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testAllEnabledAfterDisabled() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject(); XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject(); @@ -76,7 +84,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); // make sure mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); @@ -99,7 +107,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in MetaDataMappingService#putMapping try { - indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), true, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()),
MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -120,14 +128,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { .endObject().endObject().endObject(); try { - mapperService.merge("type", new CompressedXContent(update.string()), false, false); + mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); } try { - mapperService.merge("type", new CompressedXContent(update.string()), false, false); + mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); @@ -147,7 +155,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -155,7 +163,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -174,15 +182,15 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { XContentBuilder mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject(); MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); - mapperService.merge("type1", new CompressedXContent(mapping1.string()), false, false); - mapperService.merge("type2", new CompressedXContent(mapping2.string()), false, false); + mapperService.merge("type1", new CompressedXContent(mapping1.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(mapping2.string()), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties").startObject("foo").field("type", "double").endObject() .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -190,7 +198,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -209,14 +217,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); try { - mapperService.merge("type", new 
CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); } try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); @@ -233,14 +241,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test", settings).mapperService(); try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); } try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java index 5ce841540d1..9e46ce95041 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java @@ -46,12 +46,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").endObject().endObject() .endObject().endObject().string(); - mapperService.merge("type", new CompressedXContent(mapper), true, true); + mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true); String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorService.TYPE_NAME) .startObject("properties").startObject("query").field("type", "percolator").endObject().endObject() .endObject().endObject().string(); - mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), true, true); + mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); } public void testPercolatorFieldMapper() throws Exception { @@ -85,7 +85,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("query").field("type", "percolator").field("index", "no").endObject().endObject() .endObject().endObject().string(); try { - mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), true, true); + mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); fail("MapperParsingException expected"); } catch (MapperParsingException e) { assertThat(e.getMessage(), 
equalTo("Mapping definition for [query] has unsupported parameters: [index : no]")); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index b88d0be23b6..52aa7ea48f0 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; + import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -46,7 +47,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; @@ -74,8 +74,6 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -93,9 +91,11 @@ import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.cluster.TestClusterService; @@ -196,21 +196,22 @@ public abstract class AbstractQueryTestCase> final TestClusterService clusterService = new TestClusterService(); clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( new IndexMetaData.Builder(index.name()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); final Client proxy = (Client) Proxy.newProxyInstance( Client.class.getClassLoader(), new Class[]{Client.class}, clientInvocationHandler); + namedWriteableRegistry = new NamedWriteableRegistry(); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), - new SettingsModule(settings, new SettingsFilter(settings)), + settingsModule, new ThreadPoolModule(new ThreadPool(settings)), new IndicesModule() { @Override public void configure() { // skip services - bindQueryParsersExtension(); bindMapperExtension(); - 
bind(ShapeBuilderRegistry.class).asEagerSingleton(); } }, new ScriptModule(settings) { @@ -234,20 +235,26 @@ public abstract class AbstractQueryTestCase> } catch(IOException e) { throw new IllegalStateException("error while binding ScriptService", e); } - - } }, new IndexSettingsModule(index, indexSettings), + new SearchModule(settings, namedWriteableRegistry) { + @Override + protected void configureSearch() { + // Skip me + } + @Override + protected void configureSuggesters() { + // Skip me + } + }, new AbstractModule() { @Override protected void configure() { bind(Client.class).toInstance(proxy); - Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); - bind(ScoreFunctionParserMapper.class).asEagerSingleton(); bind(ClusterService.class).toProvider(Providers.of(clusterService)); bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); } } ).createInjector(); @@ -284,10 +291,11 @@ public abstract class AbstractQueryTestCase> OBJECT_FIELD_NAME, "type=object", GEO_POINT_FIELD_NAME, "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true", GEO_SHAPE_FIELD_NAME, "type=geo_shape" - ).string()), false, false); + ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner field in the object field mapperService.merge(type, new CompressedXContent("{\"properties\":{\""+OBJECT_FIELD_NAME+"\":{\"type\":\"object\"," - + "\"properties\":{\""+DATE_FIELD_NAME+"\":{\"type\":\"date\"},\""+INT_FIELD_NAME+"\":{\"type\":\"integer\"}}}}}"), false, false); + + "\"properties\":{\""+DATE_FIELD_NAME+"\":{\"type\":\"date\"},\""+INT_FIELD_NAME+"\":{\"type\":\"integer\"}}}}}"), + MapperService.MergeReason.MAPPING_UPDATE, false); currentTypes[i] = type; } namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); @@ -483,7 +491,7 @@ public abstract class AbstractQueryTestCase> QueryParseContext context = createParseContext(); context.reset(parser); context.parseFieldMatcher(matcher); - QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); + QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); assertTrue(parser.nextToken() == null); return parseInnerQueryBuilder; } @@ -613,7 +621,7 @@ public abstract class AbstractQueryTestCase> testQuery.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { QueryBuilder prototype = queryParser(testQuery.getName()).getBuilderPrototype(); - QueryBuilder deserializedQuery = prototype.readFrom(in); + QueryBuilder deserializedQuery = prototype.readFrom(in); assertEquals(deserializedQuery, testQuery); assertEquals(deserializedQuery.hashCode(), testQuery.hashCode()); assertNotSame(deserializedQuery, testQuery); @@ -808,12 +816,6 @@ public abstract class AbstractQueryTestCase> } protected static Fuzziness randomFuzziness(String fieldName) { - if (randomBoolean()) { - return Fuzziness.fromEdits(randomIntBetween(0, 2)); - } - if (randomBoolean()) { - return Fuzziness.AUTO; - } switch (fieldName) { case INT_FIELD_NAME: return Fuzziness.build(randomIntBetween(3, 100)); @@ -822,6 +824,9 @@ public abstract class AbstractQueryTestCase> case DATE_FIELD_NAME: return Fuzziness.build(randomTimeValue()); default: + if (randomBoolean()) { + return Fuzziness.fromEdits(randomIntBetween(0, 2)); + } return Fuzziness.AUTO; } } @@ -845,7 +850,7 @@ public abstract class 
AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>> private static final List<String> TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs()); private static class ClientInvocationHandler implements InvocationHandler { - AbstractQueryTestCase delegate; + AbstractQueryTestCase<?> delegate; @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.equals(Client.class.getMethod("get", GetRequest.class))) {
diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 1c407fbaa0e..b15513222a6 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -106,16 +106,16 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase clauseIterator = clauses.iterator(); - for (BooleanClause booleanClause : booleanQuery.getClauses()) { + for (BooleanClause booleanClause : booleanQuery.clauses()) { assertThat(booleanClause, instanceOf(clauseIterator.next().getClass())); } } } } - private static List<BooleanClause> getBooleanClauses(List<QueryBuilder> queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException { + private static List<BooleanClause> getBooleanClauses(List<QueryBuilder<?>> queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException { List<BooleanClause> clauses = new ArrayList<>(); - for (QueryBuilder query : queryBuilders) { + for (QueryBuilder<?> query : queryBuilders) { Query innerQuery = query.toQuery(context); if (innerQuery != null) { clauses.add(new BooleanClause(innerQuery, occur)); @@ -132,22 +132,22 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase 0) { - QueryBuilder must = tempQueryBuilder.must().get(0); + QueryBuilder<?> must = tempQueryBuilder.must().get(0); contentString += "must: " + must.toString() + ","; expectedQuery.must(must); } if (tempQueryBuilder.mustNot().size() > 0) { - QueryBuilder mustNot = tempQueryBuilder.mustNot().get(0); + QueryBuilder<?> mustNot = tempQueryBuilder.mustNot().get(0); contentString += (randomBoolean() ? "must_not: " : "mustNot: ") + mustNot.toString() + ","; expectedQuery.mustNot(mustNot); } if (tempQueryBuilder.should().size() > 0) { - QueryBuilder should = tempQueryBuilder.should().get(0); + QueryBuilder<?> should = tempQueryBuilder.should().get(0); contentString += "should: " + should.toString() + ","; expectedQuery.should(should); } if (tempQueryBuilder.filter().size() > 0) { - QueryBuilder filter = tempQueryBuilder.filter().get(0); + QueryBuilder<?> filter = tempQueryBuilder.filter().get(0); contentString += "filter: " + filter.toString() + ","; expectedQuery.filter(filter); } @@ -272,6 +272,17 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map<String, DisMaxQueryBuilder> alternateVersions = new HashMap<>(); - QueryBuilder innerQuery = createTestQueryBuilder().innerQueries().get(0); + QueryBuilder<?> innerQuery = createTestQueryBuilder().innerQueries().get(0); DisMaxQueryBuilder expectedQuery = new DisMaxQueryBuilder(); expectedQuery.add(innerQuery); String contentString = "{\n" +
diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index f2b3a1a5026..15d37150b3f 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.TestSearchContext; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; @@ -57,9 +58,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase field = fieldsIterator.next(); assertTermOrBoostQuery(booleanClause.getQuery(), field.getKey(), queryBuilder.value(), field.getValue()); } - if (queryBuilder.minimumShouldMatch() != null) { + if (queryBuilder.minimumShouldMatch() != null && !boolQuery.isCoordDisabled()) { assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); } } else if (queryBuilder.fields().size() == 1) {
diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java index dc8d096f688..3b8ae51559e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java @@ -50,7 +50,7 @@ public class SpanNearQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); + Iterator<SpanQueryBuilder<?>> spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanNearQuery.getClauses()) { assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); }
diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java index 7a8b66139e3..c3d7be9ae9e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java @@ -45,7 +45,7 @@ public class SpanOrQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); + Iterator<SpanQueryBuilder<?>> spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for
(SpanQuery spanQuery : spanOrQuery.getClauses()) { assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } @@ -70,32 +70,32 @@ public class SpanOrQueryBuilderTests extends AbstractQueryTestCase)queryBuilder).buildAsBytes(XContentType.values()[i])); } } } @@ -414,7 +415,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase)queryBuilder).buildAsBytes(XContentType.values()[i])); } } } diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java index c72470c0cce..98dbf67de57 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.query.plugin; +import java.io.IOException; + import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -31,10 +33,8 @@ import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.plugins.Plugin; - -import java.io.IOException; +import org.elasticsearch.search.SearchModule; public class DummyQueryParserPlugin extends Plugin { @@ -48,8 +48,8 @@ public class DummyQueryParserPlugin extends Plugin { return "dummy query"; } - public void onModule(IndicesModule module) { - module.registerQueryParser(DummyQueryParser.class); + public void onModule(SearchModule module) { + module.registerQueryParser(DummyQueryParser::new); } public static class DummyQueryBuilder extends AbstractQueryBuilder { diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 236198261d7..46c8d3745ca 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -64,7 +64,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { " }\n" + " }\n" + "}"; - mapperService.merge("person", new CompressedXContent(mapping), true, false); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); this.indexService = indexService; } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 446f5cdd7f9..fd32091d891 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -82,17 +82,18 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.search.stats.SearchSlowLog; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import 
org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; @@ -101,6 +102,7 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -128,6 +130,11 @@ import static org.hamcrest.Matchers.equalTo; */ public class IndexShardTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testWriteShardState() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { ShardId id = new ShardId("foo", 1); @@ -400,7 +407,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } private void setDurability(IndexShard shard, Translog.Durability durability) { - client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durability.name()).build()).get(); + client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), durability.name()).build()).get(); assertEquals(durability, shard.getTranslogDurability()); } @@ -692,13 +699,13 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testMaybeFlush() throws Exception { - createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST).build()); + createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST).build()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); @@ -714,7 +721,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { shard.getEngine().getTranslog().sync(); long size = shard.getEngine().getTranslog().sizeInBytes(); logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); - 
client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(size, ByteSizeUnit.BYTES)) .build()).get(); client().prepareDelete("test", "test", "2").get(); logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); @@ -732,7 +739,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index dd5ca6bcc51..30f68ccb70e 100644 --- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -39,15 +39,24 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; +import java.util.Collection; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; public class SimilarityTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testResolveDefaultSimilarities() { SimilarityService similarityService = createIndex("foo").similarityService(); assertThat(similarityService.getSimilarity("classic").get(), instanceOf(ClassicSimilarity.class)); diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index f7fbe3dc8a8..2c9de235b7d 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -62,6 +62,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.ESIntegTestCase; +import 
org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockIndexEventListener; import org.elasticsearch.test.store.MockFSIndexStore; @@ -107,6 +108,7 @@ import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class CorruptedFileIT extends ESIntegTestCase { + @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() @@ -121,7 +123,8 @@ public class CorruptedFileIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(MockTransportService.TestPlugin.class, MockIndexEventListener.TestPlugin.class); + return pluginList(MockTransportService.TestPlugin.class, MockIndexEventListener.TestPlugin.class, MockFSIndexStore.TestPlugin.class, + InternalSettingsPlugin.class); // uses index.version.created } /** @@ -141,9 +144,8 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); disableAllocation("test"); @@ -246,9 +248,8 @@ public class CorruptedFileIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; @@ -329,7 +330,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put("index.routing.allocation.include._name", primariesNode.getNode().name()) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) )); ensureGreen(); // allocated with empty commit @@ -391,9 +392,9 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(1, 4)) // don't go crazy here it must recovery fast // This 
does corrupt files on the replica, so we can't check: - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) .put("index.routing.allocation.include._name", primariesNode.getNode().name()) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; @@ -472,9 +473,8 @@ public class CorruptedFileIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") // no replicas for this test .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; @@ -524,12 +524,11 @@ public class CorruptedFileIT extends ESIntegTestCase { internalCluster().ensureAtLeastNumDataNodes(2); assertAcked(prepareCreate("test").setSettings(Settings.builder() - .put(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS, "one") + .put(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS_SETTING.getKey(), "one") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, cluster().numDataNodes() - 1) .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
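
The hunks on either side of this point repeat a single migration: a raw settings key string (MockFSIndexStore.CHECK_INDEX_ON_CLOSE, IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS) is replaced by a typed Setting constant whose key is read via getKey(). A minimal sketch of the pattern, assuming the Setting constructor shape that appears later in this diff; the class, setting name, and values here are illustrative, not part of the change:

    import java.util.function.Function;

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;

    public class SettingKeyExample {
        // Hypothetical index-scoped setting; the last two arguments mean dynamic=true, scope=INDEX.
        static final Setting<String> INDEX_EXAMPLE_SETTING =
                new Setting<>("index.example", "default-value", Function.identity(), true, Setting.Scope.INDEX);

        public static void main(String[] args) {
            // Callers no longer hard-code "index.example"; the key lives with the setting definition.
            Settings settings = Settings.builder()
                    .put(INDEX_EXAMPLE_SETTING.getKey(), "overridden")
                    .build();
            System.out.println(INDEX_EXAMPLE_SETTING.get(settings)); // prints "overridden"
        }
    }

Centralizing the key inside a Setting object is what lets the settings infrastructure validate and dynamically apply updates, instead of pattern-matching free-form strings.
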
diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 3ff7aa97d6a..500cf07692e 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -167,13 +167,13 @@ public class CorruptedTranslogIT extends ESIntegTestCase { /** Disables translog flushing for the specified index */ private static void disableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).build(); client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); } /** Enables translog flushing for the specified index */ private static void enableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)).build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)).build(); client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); } } diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index f27d9ddcd2b..300e4bb9ab4 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -49,7 +49,7 @@ public class IndexStoreTests extends ESTestCase { final Path tempDir = createTempDir().resolve("foo").resolve("0"); final IndexModule.Type[] values = IndexModule.Type.values(); final IndexModule.Type type = RandomPicks.randomFrom(random(), values); - Settings settings = Settings.settingsBuilder().put(IndexModule.STORE_TYPE, type.name().toLowerCase(Locale.ROOT)) + Settings settings = Settings.settingsBuilder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), type.name().toLowerCase(Locale.ROOT)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo"), settings); FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0))); diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index acaa1cf8b5d..ed98fc1bacd 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -1154,7 +1154,7 @@ public class StoreTests extends ESTestCase { DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, TimeValue.timeValueMinutes(0)).build(); + .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(0)).build(); Store store = new Store(shardId, IndexSettingsModule.newIndexSettings(new Index("index"), settings), directoryService, new DummyShardLock(shardId)); long initialStoreSize = 0; for (String extraFiles : store.directory().listAll()) { diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index da5e5c8aa08..e1935328232 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -138,8 +138,8 @@ public class TranslogTests extends ESTestCase { private TranslogConfig getTranslogConfig(Path path) { Settings build = Settings.settingsBuilder() - .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .build(); +
.put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) + .build(); ByteSizeValue bufferSize = randomBoolean() ? TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); } @@ -234,19 +234,16 @@ public class TranslogTests extends ESTestCase { ArrayList ops = new ArrayList<>(); Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - snapshot.close(); addToTranslogAndList(translog, ops, new Translog.Index("test", "1", new byte[]{1})); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(ops.size())); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(ops.size())); addToTranslogAndList(translog, ops, new Translog.Delete(newUid("2"))); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(ops.size())); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(ops.size())); snapshot = translog.newSnapshot(); @@ -260,22 +257,18 @@ public class TranslogTests extends ESTestCase { assertThat(snapshot.next(), equalTo(null)); - snapshot.close(); - long firstId = translog.currentFileGeneration(); translog.prepareCommit(); assertThat(translog.currentFileGeneration(), Matchers.not(equalTo(firstId))); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(ops.size())); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(ops.size())); translog.commit(); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(0)); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(0)); } protected TranslogStats stats() throws IOException { @@ -337,9 +330,9 @@ public class TranslogTests extends ESTestCase { assertEquals(6, copy.estimatedNumberOfOperations()); assertEquals(431, copy.getTranslogSizeInBytes()); assertEquals("\"translog\"{\n" + - " \"operations\" : 6,\n" + - " \"size_in_bytes\" : 431\n" + - "}", copy.toString().trim()); + " \"operations\" : 6,\n" + + " \"size_in_bytes\" : 431\n" + + "}", copy.toString().trim()); try { new TranslogStats(1, -1); @@ -359,51 +352,43 @@ public class TranslogTests extends ESTestCase { ArrayList ops = new ArrayList<>(); Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - snapshot.close(); addToTranslogAndList(translog, ops, new Translog.Index("test", "1", new byte[]{1})); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(1)); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(1)); snapshot = translog.newSnapshot(); - assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(1)); - - // snapshot while another is open Translog.Snapshot snapshot1 = translog.newSnapshot(); - assertThat(snapshot1, SnapshotMatchers.size(1)); - assertThat(snapshot1.estimatedTotalOperations(), equalTo(1)); + assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); + assertThat(snapshot.totalOperations(), 
equalTo(1)); - snapshot.close(); - snapshot1.close(); + assertThat(snapshot1, SnapshotMatchers.size(1)); + assertThat(snapshot1.totalOperations(), equalTo(1)); } public void testSnapshotWithNewTranslog() throws IOException { ArrayList ops = new ArrayList<>(); Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - snapshot.close(); addToTranslogAndList(translog, ops, new Translog.Index("test", "1", new byte[]{1})); Translog.Snapshot snapshot1 = translog.newSnapshot(); addToTranslogAndList(translog, ops, new Translog.Index("test", "2", new byte[]{2})); + assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); + translog.prepareCommit(); addToTranslogAndList(translog, ops, new Translog.Index("test", "3", new byte[]{3})); - Translog.Snapshot snapshot2 = translog.newSnapshot(); - translog.commit(); - assertThat(snapshot2, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot2.estimatedTotalOperations(), equalTo(ops.size())); - - - assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); - snapshot1.close(); - snapshot2.close(); + try (Translog.View view = translog.newView()) { + Translog.Snapshot snapshot2 = translog.newSnapshot(); + translog.commit(); + assertThat(snapshot2, SnapshotMatchers.equalsTo(ops)); + assertThat(snapshot2.totalOperations(), equalTo(ops.size())); + } } public void testSnapshotOnClosedTranslog() throws IOException { @@ -418,39 +403,6 @@ public class TranslogTests extends ESTestCase { } } - public void testDeleteOnSnapshotRelease() throws Exception { - ArrayList firstOps = new ArrayList<>(); - addToTranslogAndList(translog, firstOps, new Translog.Index("test", "1", new byte[]{1})); - - Translog.Snapshot firstSnapshot = translog.newSnapshot(); - assertThat(firstSnapshot.estimatedTotalOperations(), equalTo(1)); - translog.commit(); - assertFileIsPresent(translog, 1); - - - ArrayList secOps = new ArrayList<>(); - addToTranslogAndList(translog, secOps, new Translog.Index("test", "2", new byte[]{2})); - assertThat(firstSnapshot.estimatedTotalOperations(), equalTo(1)); - - Translog.Snapshot secondSnapshot = translog.newSnapshot(); - translog.add(new Translog.Index("test", "3", new byte[]{3})); - assertThat(secondSnapshot, SnapshotMatchers.equalsTo(secOps)); - assertThat(secondSnapshot.estimatedTotalOperations(), equalTo(1)); - assertFileIsPresent(translog, 1); - assertFileIsPresent(translog, 2); - - firstSnapshot.close(); - assertFileDeleted(translog, 1); - assertFileIsPresent(translog, 2); - secondSnapshot.close(); - assertFileIsPresent(translog, 2); // it's the current nothing should be deleted - translog.commit(); - assertFileIsPresent(translog, 3); // it's the current nothing should be deleted - assertFileDeleted(translog, 2); - - } - - public void assertFileIsPresent(Translog translog, long id) { if (Files.exists(translogDir.resolve(Translog.getFilename(id)))) { return; @@ -624,14 +576,8 @@ public class TranslogTests extends ESTestCase { Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(1)); assertFileIsPresent(translog, 1); - assertThat(snapshot.estimatedTotalOperations(), equalTo(1)); - if (randomBoolean()) { - translog.close(); - snapshot.close(); - } else { - snapshot.close(); - translog.close(); - } + assertThat(snapshot.totalOperations(), equalTo(1)); + translog.close(); assertFileIsPresent(translog, 1); } @@ -708,16 +654,21 @@ public class TranslogTests extends ESTestCase { public void onFailure(Throwable t) { logger.error("--> reader [{}] had an error", t, 
threadId); errors.add(t); - closeView(); + try { + closeView(); + } catch (IOException e) { + logger.error("unexpected error while closing view, after failure"); + t.addSuppressed(e); + } } - void closeView() { + void closeView() throws IOException { if (view != null) { view.close(); } } - void newView() { + void newView() throws IOException { closeView(); view = translog.newView(); // captures the currently written ops so we know what to expect from the view @@ -738,17 +689,16 @@ public class TranslogTests extends ESTestCase { // these are what we expect the snapshot to return (and potentially some more). Set expectedOps = new HashSet<>(writtenOps.keySet()); expectedOps.removeAll(writtenOpsAtView); - try (Translog.Snapshot snapshot = view.snapshot()) { - Translog.Operation op; - while ((op = snapshot.next()) != null) { - expectedOps.remove(op); - } + Translog.Snapshot snapshot = view.snapshot(); + Translog.Operation op; + while ((op = snapshot.next()) != null) { + expectedOps.remove(op); } if (expectedOps.isEmpty() == false) { StringBuilder missed = new StringBuilder("missed ").append(expectedOps.size()).append(" operations"); boolean failed = false; - for (Translog.Operation op : expectedOps) { - final Translog.Location loc = writtenOps.get(op); + for (Translog.Operation expectedOp : expectedOps) { + final Translog.Location loc = writtenOps.get(expectedOp); if (loc.generation < view.minTranslogGeneration()) { // writtenOps is only updated after the op was written to the translog. This mean // that ops written to the translog before the view was taken (and will be missing from the view) @@ -756,7 +706,7 @@ public class TranslogTests extends ESTestCase { continue; } failed = true; - missed.append("\n --> [").append(op).append("] written at ").append(loc); + missed.append("\n --> [").append(expectedOp).append("] written at ").append(loc); } if (failed) { fail(missed.toString()); @@ -803,7 +753,6 @@ public class TranslogTests extends ESTestCase { } } - public void testSyncUpTo() throws IOException { int translogOperations = randomIntBetween(10, 100); int count = 0; @@ -875,7 +824,7 @@ public class TranslogTests extends ESTestCase { final Translog.Location lastLocation = translog.add(new Translog.Index("test", "" + translogOperations, Integer.toString(translogOperations).getBytes(Charset.forName("UTF-8")))); final Checkpoint checkpoint = Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME)); - try (final ImmutableTranslogReader reader = translog.openReader(translog.location().resolve(Translog.getFilename(translog.currentFileGeneration())), checkpoint)) { + try (final TranslogReader reader = translog.openReader(translog.location().resolve(Translog.getFilename(translog.currentFileGeneration())), checkpoint)) { assertEquals(lastSynced + 1, reader.totalOperations()); for (int op = 0; op < translogOperations; op++) { Translog.Location location = locations.get(op); @@ -913,7 +862,7 @@ public class TranslogTests extends ESTestCase { } writer.sync(); - final TranslogReader reader = randomBoolean() ? writer : translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME))); + final BaseTranslogReader reader = randomBoolean() ? 
writer : translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME))); for (int i = 0; i < numOps; i++) { ByteBuffer buffer = ByteBuffer.allocate(4); reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * i); @@ -926,7 +875,7 @@ public class TranslogTests extends ESTestCase { out.writeInt(2048); writer.add(new BytesArray(bytes)); - if (reader instanceof ImmutableTranslogReader) { + if (reader instanceof TranslogReader) { ByteBuffer buffer = ByteBuffer.allocate(4); try { reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * numOps); @@ -934,6 +883,7 @@ public class TranslogTests extends ESTestCase { } catch (EOFException ex) { // expected } + ((TranslogReader) reader).close(); } else { // live reader! ByteBuffer buffer = ByteBuffer.allocate(4); @@ -943,7 +893,7 @@ public class TranslogTests extends ESTestCase { final int value = buffer.getInt(); assertEquals(2048, value); } - IOUtils.close(writer, reader); + IOUtils.close(writer); } public void testBasicRecovery() throws IOException { @@ -971,19 +921,17 @@ public class TranslogTests extends ESTestCase { assertEquals(0, translog.stats().estimatedNumberOfOperations()); assertEquals(1, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - assertNull(snapshot.next()); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + assertNull(snapshot.next()); } else { assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - for (int i = minUncommittedOp; i < translogOperations; i++) { - assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + for (int i = minUncommittedOp; i < translogOperations; i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1014,13 +962,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? 
translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } if (randomBoolean()) { // recover twice @@ -1028,13 +975,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 3 less than current - we never finished the commit and run recovery twice", translogGeneration.translogFileGeneration + 3, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1071,14 +1017,14 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } + } if (randomBoolean()) { // recover twice @@ -1086,13 +1032,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 3 less than current - we never finished the commit and run recovery twice", translogGeneration.translogFileGeneration + 3, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? 
translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1132,13 +1077,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1209,14 +1153,13 @@ public class TranslogTests extends ESTestCase { } config.setTranslogGeneration(translogGeneration); this.translog = new Translog(config); - try (Translog.Snapshot snapshot = this.translog.newSnapshot()) { - for (int i = firstUncommitted; i < translogOperations; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("" + i, next); - assertEquals(Integer.parseInt(next.getSource().source.toUtf8()), i); - } - assertNull(snapshot.next()); + Translog.Snapshot snapshot = this.translog.newSnapshot(); + for (int i = firstUncommitted; i < translogOperations; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("" + i, next); + assertEquals(Integer.parseInt(next.getSource().source.toUtf8()), i); } + assertNull(snapshot.next()); } public void testFailOnClosedWrite() throws IOException { @@ -1287,12 +1230,12 @@ public class TranslogTests extends ESTestCase { case CREATE: case INDEX: op = new Translog.Index("test", threadId + "_" + opCount, - randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); + randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); break; case DELETE: op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount), - 1 + randomInt(100000), - randomFrom(VersionType.values())); + 1 + randomInt(100000), + randomFrom(VersionType.values())); break; default: throw new ElasticsearchException("not supported op type"); @@ -1383,14 +1326,13 @@ public class TranslogTests extends ESTestCase { assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, tlog.currentFileGeneration()); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - assertEquals(opsSynced, snapshot.estimatedTotalOperations()); - for (int i = 0; i < opsSynced; i++) { - assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, locations.get(i).generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot 
snapshot = tlog.newSnapshot(); + assertEquals(opsSynced, snapshot.totalOperations()); + for (int i = 0; i < opsSynced; i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, locations.get(i).generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1401,13 +1343,12 @@ public class TranslogTests extends ESTestCase { LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer boarders regularly for (int opsAdded = 0; opsAdded < numOps; opsAdded++) { locations.add(translog.add(new Translog.Index("test", "" + opsAdded, lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))))); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - assertEquals(opsAdded + 1, snapshot.estimatedTotalOperations()); - for (int i = 0; i < opsAdded; i++) { - assertEquals("expected operation" + i + " to be in the current translog but wasn't", translog.currentFileGeneration(), locations.get(i).generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - } + Translog.Snapshot snapshot = this.translog.newSnapshot(); + assertEquals(opsAdded + 1, snapshot.totalOperations()); + for (int i = 0; i < opsAdded; i++) { + assertEquals("expected operation" + i + " to be in the current translog but wasn't", translog.currentFileGeneration(), locations.get(i).generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); } } } @@ -1511,20 +1452,20 @@ public class TranslogTests extends ESTestCase { } config.setTranslogGeneration(translog.getGeneration()); try (Translog tlog = new Translog(config)) { - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - if (writtenOperations.size() != snapshot.estimatedTotalOperations()) { - for (int i = 0; i < threadCount; i++) { - if (threadExceptions[i] != null) - threadExceptions[i].printStackTrace(); + Translog.Snapshot snapshot = tlog.newSnapshot(); + if (writtenOperations.size() != snapshot.totalOperations()) { + for (int i = 0; i < threadCount; i++) { + if (threadExceptions[i] != null) { + threadExceptions[i].printStackTrace(); } } - assertEquals(writtenOperations.size(), snapshot.estimatedTotalOperations()); - for (int i = 0; i < writtenOperations.size(); i++) { - assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, writtenOperations.get(i).location.generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals(next, writtenOperations.get(i).operation); - } + } + assertEquals(writtenOperations.size(), snapshot.totalOperations()); + for (int i = 0; i < writtenOperations.size(); i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, writtenOperations.get(i).location.generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(next, writtenOperations.get(i).operation); } } } @@ -1537,6 +1478,7 @@ public class TranslogTests extends ESTestCase { private static class FailSwitch { private volatile int failRate; private volatile boolean onceFailedFailAlways = false; + public boolean fail() { boolean fail = 
randomIntBetween(1, 100) <= failRate; if (fail && onceFailedFailAlways) { @@ -1716,24 +1658,22 @@ public class TranslogTests extends ESTestCase { try (Translog tlog = new Translog(config)) { assertNotNull(translogGeneration); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - for (int i = 0; i < 1; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + for (int i = 0; i < 1; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } try (Translog tlog = new Translog(config)) { assertNotNull(translogGeneration); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - for (int i = 0; i < 2; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + for (int i = 0; i < 2; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1749,7 +1689,7 @@ public class TranslogTests extends ESTestCase { Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog")); config.setTranslogGeneration(translogGeneration); - try { + try { Translog tlog = new Translog(config); fail("file already exists?"); } catch (TranslogException ex) { @@ -1758,6 +1698,7 @@ public class TranslogTests extends ESTestCase { assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class); } } + public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { translog.add(new Translog.Index("test", "" + 0, Integer.toString(0).getBytes(Charset.forName("UTF-8")))); Translog.TranslogGeneration translogGeneration = translog.getGeneration(); @@ -1774,17 +1715,16 @@ public class TranslogTests extends ESTestCase { try (Translog tlog = new Translog(config)) { assertNotNull(translogGeneration); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - for (int i = 0; i < 1; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + for (int i = 0; i < 1; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } - try { + try { Translog tlog = new Translog(config); fail("file already exists?"); } catch (TranslogException ex) { @@ -1863,13 +1803,12 @@ public class TranslogTests extends ESTestCase { } try (Translog translog = new Translog(config)) { - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - 
assertEquals(syncedDocs.size(), snapshot.estimatedTotalOperations()); - for (int i = 0; i < syncedDocs.size(); i++) { - Translog.Operation next = snapshot.next(); - assertEquals(syncedDocs.get(i), next.getSource().source.toUtf8()); - assertNotNull("operation " + i + " must be non-null", next); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + assertEquals(syncedDocs.size(), snapshot.totalOperations()); + for (int i = 0; i < syncedDocs.size(); i++) { + Translog.Operation next = snapshot.next(); + assertEquals(syncedDocs.get(i), next.getSource().source.toUtf8()); + assertNotNull("operation " + i + " must be non-null", next); } } } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java index 68f26c504fb..8ae7117d483 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.translog; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.index.VersionType; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -29,66 +28,32 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** * Tests for reading old and new translog files */ public class TranslogVersionTests extends ESTestCase { - public void testV0LegacyTranslogVersion() throws Exception { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v0.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - assertThat("a version0 stream is returned", reader instanceof LegacyTranslogReader, equalTo(true)); - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - final Translog.Operation operation = snapshot.next(); - assertThat("operation is the correct type correctly", operation.opType() == Translog.Operation.Type.INDEX, equalTo(true)); - Translog.Index op = (Translog.Index) operation; - assertThat(op.id(), equalTo("1")); - assertThat(op.type(), equalTo("doc")); - assertThat(op.source().toUtf8(), equalTo("{\"body\": \"worda wordb wordc wordd \\\"worde\\\" wordf\"}")); - assertThat(op.routing(), equalTo(null)); - assertThat(op.parent(), equalTo(null)); - assertThat(op.version(), equalTo(1L)); - assertThat(op.timestamp(), equalTo(1407312091791L)); - assertThat(op.ttl(), equalTo(-1L)); - assertThat(op.versionType(), equalTo(VersionType.INTERNAL)); - assertNull(snapshot.next()); - } + private void checkFailsToOpen(String file, String expectedMessage) throws IOException { + Path translogFile = getDataPath(file); + assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); + try { + openReader(translogFile, 0); + fail("should not have been able to open an old translog"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString(expectedMessage)); } + + } + + public void testV0LegacyTranslogVersion() throws Exception { + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v0.binary", "pre-1.4 translog"); }
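
checkFailsToOpen above is the classic try/fail/catch expectation idiom: the call under test must throw, and fail() only trips when it does not, so its message should describe the missing failure. A standalone sketch of the idiom (the helper name and message are illustrative):

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.containsString;
    import static org.junit.Assert.fail;

    public class ExpectFailureExample {
        // Runs the action and insists it throws an IllegalStateException containing the fragment.
        static void expectIllegalState(Runnable action, String expectedFragment) {
            try {
                action.run();
                fail("expected an IllegalStateException but none was thrown");
            } catch (IllegalStateException e) {
                assertThat(e.getMessage(), containsString(expectedFragment));
            }
        }

        public static void main(String[] args) {
            expectIllegalState(() -> {
                throw new IllegalStateException("refusing to open pre-2.0 translog");
            }, "pre-2.0 translog");
        }
    }
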
public void testV1ChecksummedTranslogVersion() throws Exception { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v1.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - - assertThat("a version1 stream is returned", reader instanceof ImmutableTranslogReader, equalTo(true)); - - Translog.Operation operation = snapshot.next(); - - assertThat("operation is the correct type correctly", operation.opType() == Translog.Operation.Type.INDEX, equalTo(true)); - Translog.Index op = (Translog.Index) operation; - assertThat(op.id(), equalTo("Bwiq98KFSb6YjJQGeSpeiw")); - assertThat(op.type(), equalTo("doc")); - assertThat(op.source().toUtf8(), equalTo("{\"body\": \"foo\"}")); - assertThat(op.routing(), equalTo(null)); - assertThat(op.parent(), equalTo(null)); - assertThat(op.version(), equalTo(1L)); - assertThat(op.timestamp(), equalTo(1408627184844L)); - assertThat(op.ttl(), equalTo(-1L)); - assertThat(op.versionType(), equalTo(VersionType.INTERNAL)); - - // There are more operations - int opNum = 1; - while (snapshot.next() != null) { - opNum++; - } - assertThat("there should be 5 translog operations", opNum, equalTo(5)); - } - } + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v1.binary", "pre-2.0 translog"); } public void testCorruptedTranslogs() throws Exception { @@ -112,47 +77,17 @@ public class TranslogVersionTests extends ESTestCase { e.getMessage().contains("Invalid first byte in translog file, got: 1, expected 0x00 or 0x3f"), equalTo(true)); } - try { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v1-corrupted-body.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - while(snapshot.next() != null) { - - } - } - } - fail("should have thrown an exception about the body being corrupted"); - } catch (TranslogCorruptedException e) { - assertThat("translog corruption from body: " + e.getMessage(), - e.getMessage().contains("translog corruption while reading from stream"), equalTo(true)); - } - + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v1-corrupted-body.binary", "pre-2.0 translog"); } public void testTruncatedTranslog() throws Exception { - try { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v1-truncated.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - while(snapshot.next() != null) { - - } - } - } - fail("should have thrown an exception about the body being truncated"); - } catch (TranslogCorruptedException e) { - assertThat("translog truncated: " + e.getMessage(), - e.getMessage().contains("operation size is corrupted must be"), equalTo(true)); - } + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v1-truncated.binary", "pre-2.0 translog"); } - public ImmutableTranslogReader openReader(Path path, long id) throws IOException { + public TranslogReader openReader(Path path, long id) throws IOException { FileChannel channel = FileChannel.open(path, StandardOpenOption.READ); try { - final ChannelReference raf = new ChannelReference(path, id, channel, null); - ImmutableTranslogReader reader = ImmutableTranslogReader.open(raf, new Checkpoint(Files.size(path), TranslogReader.UNKNOWN_OP_COUNT, id), null); + TranslogReader reader =
TranslogReader.open(channel, path, new Checkpoint(Files.size(path), 1, id), null); channel = null; return reader; } finally { diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 8abe19ffbb6..4853d59588b 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -63,7 +63,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put(SETTING_NUMBER_OF_SHARDS, 11) .put(SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0s") + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0s") .build(); // start one server diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index 4f6aaf25705..ed8c27892b9 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -84,7 +84,7 @@ public class IndicesLifecycleListenerIT extends ESIntegTestCase { @Override public void beforeIndexAddedToCluster(Index index, Settings indexSettings) { beforeAddedCount.incrementAndGet(); - if (indexSettings.getAsBoolean("index.fail", false)) { + if (MockIndexEventListener.TestPlugin.INDEX_FAIL.get(indexSettings)) { throw new ElasticsearchException("failing on purpose"); } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java deleted file mode 100644 index ec182a69890..00000000000 --- a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.indices; - -import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.index.query.TermQueryParser; - -import java.io.IOException; - -public class IndicesModuleTests extends ModuleTestCase { - - static class FakeQueryParser implements QueryParser { - @Override - public String[] names() { - return new String[] {"fake-query-parser"}; - } - - @Override - public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { - return null; - } - - @Override - public QueryBuilder getBuilderPrototype() { - return null; - } - } - - public void testRegisterQueryParser() { - IndicesModule module = new IndicesModule(); - module.registerQueryParser(FakeQueryParser.class); - assertSetMultiBinding(module, QueryParser.class, FakeQueryParser.class); - } - - public void testRegisterQueryParserDuplicate() { - IndicesModule module = new IndicesModule(); - try { - module.registerQueryParser(TermQueryParser.class); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Can't register the same [query_parser] more than once for [" + TermQueryParser.class.getName() + "]"); - } - } - - public void testRegisterHunspellDictionaryDuplicate() { - IndicesModule module = new IndicesModule(); - try { - module.registerQueryParser(TermQueryParser.class); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Can't register the same [query_parser] more than once for [" + TermQueryParser.class.getName() + "]"); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index aeb4ac55410..6d0a8955116 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -46,11 +46,17 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.suggest.SuggestBuilders; import org.elasticsearch.test.ESIntegTestCase; +import java.util.Collection; +import java.util.function.Function; + import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -60,6 +66,12 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class IndicesOptionsIntegrationIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(TestPlugin.class); // + } + public void testSpecifiedIndexUnavailableMultipleIndices() throws Exception { createIndex("test1"); ensureYellow(); @@ -619,6 +631,29 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { 
assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type4"), notNullValue());
}
+    public static final class TestPlugin extends Plugin {
+        @Override
+        public String name() {
+            return "index-a-setting";
+        }
+
+        @Override
+        public String description() {
+            return "a plugin that adds a dynamic test setting";
+        }
+
+        private static final Setting<String> INDEX_A = new Setting<>("index.a", "", Function.identity(), true, Setting.Scope.INDEX);
+        private static final Setting<String> INDEX_C = new Setting<>("index.c", "", Function.identity(), true, Setting.Scope.INDEX);
+        private static final Setting<String> INDEX_E = new Setting<>("index.e", "", Function.identity(), false, Setting.Scope.INDEX);
+
+
+        public void onModule(SettingsModule module) {
+            module.registerSetting(INDEX_A);
+            module.registerSetting(INDEX_C);
+            module.registerSetting(INDEX_E);
+        }
+    }
+
public void testUpdateSettings() throws Exception {
    verify(client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put("a", "b")), true);
    verify(client().admin().indices().prepareUpdateSettings("_all").setSettings(Settings.builder().put("a", "b")), true);
diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java
index 4130cf5ad8a..cd4ca35bbc5 100644
--- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java
@@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESBackcompatTestCase;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
import java.util.ArrayList;
@@ -46,7 +47,7 @@ import static org.hamcrest.Matchers.notNullValue;
public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return pluginList(DummyAnalysisPlugin.class);
+        return pluginList(DummyAnalysisPlugin.class, InternalSettingsPlugin.class);
    }
    public void testThatPreBuiltAnalyzersAreNotClosedOnIndexClose() throws Exception {
diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java
index ffb86c77c72..46402c6054a 100644
--- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java
@@ -40,7 +40,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
    public void testCacheAggs() throws Exception {
        assertAcked(client().admin().indices().prepareCreate("index")
                .addMapping("type", "f", "type=date")
-                .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, true).get());
+                .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get());
        indexRandom(true,
                client().prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"),
                client().prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z"));
diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
index 370770481cb..c8a80f6621f 100644
--- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java
@@ -153,7 +153,7 @@ public class FlushIT extends ESIntegTestCase {
        createIndex("test");
        client().admin().indices().prepareUpdateSettings("test").setSettings(
-                Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1))
+                Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1))
                .get();
        ensureGreen();
        final AtomicBoolean stop = new AtomicBoolean(false);
diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index f4f7fe0a1df..4807cf61dea 100644
--- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -30,7 +30,9 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.breaker.CircuitBreaker;
+import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.MockEngineFactoryPlugin;
@@ -58,7 +60,7 @@ import static org.hamcrest.Matchers.equalTo;
public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class);
+        return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class);
    }
    public void testBreakerWithRandomExceptions() throws IOException, InterruptedException, ExecutionException {
@@ -195,6 +197,8 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
    // TODO: Generalize this class and add it as a utility
    public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper {
+        public static final Setting<Double> EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX);
+        public static final Setting<Double> EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX);
        public static class TestPlugin extends Plugin {
            @Override
            public String name() {
@@ -205,6 +209,11 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
                return "a mock reader wrapper that throws random exceptions for testing";
            }
+            public void onModule(SettingsModule module) {
+                module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING);
+                module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING);
+            }
+
            public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) {
                module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class);
            }
@@ -218,9 +227,9 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
        private final double lowLevelRatio;
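// [editor's note] The constructor below shows the core migration pattern of this PR in miniature:
// stringly-typed lookups with call-site defaults become typed Setting constants that own their
// key, default, bounds and scope. Condensed before/after, reusing the names from this file:
//
//     // before: key and default value duplicated at every read site
//     this.topLevelRatio = settings.getAsDouble(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d);
//
//     // after: the Setting constant carries default 0.1, lower bound 0.0 and INDEX scope,
//     // so reads are type-safe and validated in one place
//     this.topLevelRatio = EXCEPTION_TOP_LEVEL_RATIO_SETTING.get(settings);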
ThrowingSubReaderWrapper(Settings settings) { - final long seed = settings.getAsLong(SETTING_INDEX_SEED, 0l); - this.topLevelRatio = settings.getAsDouble(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d); - this.lowLevelRatio = settings.getAsDouble(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d); + final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); + this.topLevelRatio = EXCEPTION_TOP_LEVEL_RATIO_SETTING.get(settings); + this.lowLevelRatio = EXCEPTION_LOW_LEVEL_RATIO_SETTING.get(settings); this.random = new Random(seed); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index dc61d4bc5fe..dacf23758e8 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -52,6 +52,7 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; +import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.Transport; @@ -97,7 +98,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(MockTransportService.TestPlugin.class); + return pluginList(MockTransportService.TestPlugin.class, MockFSIndexStore.TestPlugin.class); } private void assertRecoveryStateWithoutStage(RecoveryState state, int shardId, Type type, @@ -497,7 +498,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> creating test index: {}", name); assertAcked(prepareCreate(name, nodeCount, settingsBuilder().put("number_of_shards", shardCount) - .put("number_of_replicas", replicaCount).put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0))); + .put("number_of_replicas", replicaCount).put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0))); ensureGreen(); logger.info("--> indexing sample data"); @@ -530,7 +531,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { final Settings nodeSettings = Settings.builder() .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms") .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s") - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete temp files and write them again + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) // restarted recoveries will delete temp files and write them again .build(); // start a master node internalCluster().startNode(nodeSettings); @@ -547,7 +548,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { client().admin().indices().prepareCreate(indexName) .setSettings( Settings.builder() - .put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "color", "blue") + .put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "color", "blue") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) ).get(); @@ -593,7 +594,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> starting recovery from blue to red"); client().admin().indices().prepareUpdateSettings(indexName).setSettings( Settings.builder() - 
.put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "color", "red,blue") + .put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "color", "red,blue") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) ).get(); diff --git a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java index 4886ee0886b..de5bbfbe837 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java @@ -41,7 +41,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", -1) .put("index.merge.policy.expunge_deletes_allowed", "30") - .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false))); + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false))); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { @@ -50,7 +50,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { assertThat(response.getIndexToSettings().size(), greaterThanOrEqualTo(1)); assertThat(response.getSetting("test", "index.refresh_interval"), equalTo("-1")); assertThat(response.getSetting("test", "index.merge.policy.expunge_deletes_allowed"), equalTo("30")); - assertThat(response.getSetting("test", MapperService.INDEX_MAPPER_DYNAMIC_SETTING), equalTo("false")); + assertThat(response.getSetting("test", MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey()), equalTo("false")); } finally { disableIndexBlock("test", block); } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java index 53a93fd0bd1..dc48b96c23f 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -263,16 +263,16 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { public void testUpdateWithInvalidNumberOfReplicas() { createIndex("test"); + final int value = randomIntBetween(-10, -1); try { client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, value) ) .execute().actionGet(); fail("should have thrown an exception about the replica shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("the value of the setting index.number_of_replicas must be a non negative integer"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage()); } } } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 52b4c653746..67fc5acd09a 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -30,11 +30,13 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -50,13 +52,47 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class UpdateSettingsIT extends ESIntegTestCase { + + public void testResetDefault() { + createIndex("test"); + + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.settingsBuilder() + .put("index.refresh_interval", -1) + .put("index.translog.flush_threshold_size", "1024b") + ) + .execute().actionGet(); + IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertEquals(indexMetaData.getSettings().get("index.refresh_interval"), "-1"); + for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = service.indexService("test"); + if (indexService != null) { + assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), -1); + assertEquals(indexService.getIndexSettings().getFlushThresholdSize().bytes(), 1024); + } + } + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.settingsBuilder() + .putNull("index.refresh_interval") + ) + .execute().actionGet(); + indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertNull(indexMetaData.getSettings().get("index.refresh_interval")); + for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = service.indexService("test"); + if (indexService != null) { + assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), 1000); + assertEquals(indexService.getIndexSettings().getFlushThresholdSize().bytes(), 1024); + } + } + } public void testOpenCloseUpdateSettings() throws Exception { createIndex("test"); try { client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", -1) // this one can change - .put("index.cache.filter.type", "none") // this one can't + .put("index.fielddata.cache", "none") // this one can't ) .execute().actionGet(); fail(); @@ -66,12 +102,12 @@ public class UpdateSettingsIT extends ESIntegTestCase { IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), nullValue()); - assertThat(indexMetaData.getSettings().get("index.cache.filter.type"), nullValue()); + assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), nullValue()); // Now verify via dedicated get settings api: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), nullValue()); - assertThat(getSettingsResponse.getSetting("test", "index.cache.filter.type"), nullValue()); + assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), nullValue()); 
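// [editor's note] testResetDefault above pins down the headline behavioral change: sending null
// for a dynamic setting through the update-settings API now restores the registered default
// (the refresh interval falls back to the built-in 1000ms in the assertions) instead of leaving
// a stale stored value. Usage in miniature, as exercised by the test:
//
//     client().admin().indices().prepareUpdateSettings("test")
//             .setSettings(Settings.settingsBuilder().putNull("index.refresh_interval"))
//             .get();
//     // "index.refresh_interval" disappears from the index metadata and subsequent
//     // reads observe the registered default again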
client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() @@ -107,18 +143,18 @@ public class UpdateSettingsIT extends ESIntegTestCase { client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", "1s") // this one can change - .put("index.cache.filter.type", "none") // this one can't + .put("index.fielddata.cache", "none") // this one can't ) .execute().actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("1s")); - assertThat(indexMetaData.getSettings().get("index.cache.filter.type"), equalTo("none")); + assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), equalTo("none")); // Now verify via dedicated get settings api: getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("1s")); - assertThat(getSettingsResponse.getSetting("test", "index.cache.filter.type"), equalTo("none")); + assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), equalTo("none")); } public void testEngineGCDeletesSetting() throws InterruptedException { @@ -142,14 +178,14 @@ public class UpdateSettingsIT extends ESIntegTestCase { // No throttling at first, only 1 non-replicated shard, force lots of merging: assertAcked(prepareCreate("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "none") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "2") - .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) // get stats all the time - no caching + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "2") + .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0) // get stats all the time - no caching )); ensureGreen(); long termUpto = 0; @@ -180,13 +216,13 @@ public class UpdateSettingsIT extends ESIntegTestCase { .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "merge") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, "1mb")) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), "1mb")) .get(); // Make sure setting says it is in fact changed: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getSetting("test", IndexStore.INDEX_STORE_THROTTLE_TYPE), equalTo("merge")); + assertThat(getSettingsResponse.getSetting("test", IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey()), equalTo("merge")); // Also make sure we see throttling kicking in: boolean done = false; @@ -220,7 +256,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { .indices() 
.prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "none")) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none")) .get(); // Optimize does a waitForMerges, which we must do to make sure all in-flight (throttled) merges finish: @@ -310,11 +346,11 @@ public class UpdateSettingsIT extends ESIntegTestCase { .setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "2") - .put(MergeSchedulerConfig.AUTO_THROTTLE, "true") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "true") )); // Disable auto throttle: @@ -323,7 +359,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(MergeSchedulerConfig.AUTO_THROTTLE, "no")) + .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "false")) .get(); // Make sure we log the change: @@ -331,7 +367,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { // Make sure setting says it is in fact changed: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.AUTO_THROTTLE), equalTo("no")); + assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey()), equalTo("false")); } finally { rootLogger.removeAppender(mockAppender); rootLogger.setLevel(savedLevel); @@ -352,21 +388,20 @@ public class UpdateSettingsIT extends ESIntegTestCase { .setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "10000") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "10000") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "10000") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10000") )); assertFalse(mockAppender.sawUpdateMaxThreadCount); - // Now make a live change to reduce allowed merge threads: client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") ) .get(); @@ -375,7 +410,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { // Make sure setting says it is in fact changed: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.MAX_THREAD_COUNT), equalTo("1")); + assertThat(getSettingsResponse.getSetting("test", 
MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey()), equalTo("1")); } finally { rootLogger.removeAppender(mockAppender); diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index 9522b79ea11..6eebc9e1f0c 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -139,7 +139,8 @@ public class SimpleIndexStateIT extends ESIntegTestCase { try { client().admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("number_of_shards", "bad")).get(); fail(); - } catch (SettingsException ex) { + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [bad] for setting [index.number_of_shards]", ex.getMessage()); // Expected } diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index c6dbff842ef..18c03e3739a 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -79,8 +79,8 @@ public class IndexStatsIT extends ESIntegTestCase { //Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. Thread.sleep for 60s is bad return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL, "1ms") - .put(IndexModule.QUERY_CACHE_EVERYTHING, true) - .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) .build(); } @@ -185,7 +185,7 @@ public class IndexStatsIT extends ESIntegTestCase { } public void testQueryCache() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, true).get()); + assertAcked(client().admin().indices().prepareCreate("idx").setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get()); ensureGreen(); // index docs until we have at least one doc on each shard, otherwise, our tests will not work @@ -265,7 +265,7 @@ public class IndexStatsIT extends ESIntegTestCase { // set the index level setting to false, and see that the reverse works client().admin().indices().prepareClearCache().setRequestCache(true).get(); // clean the cache - assertAcked(client().admin().indices().prepareUpdateSettings("idx").setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, false))); + assertAcked(client().admin().indices().prepareUpdateSettings("idx").setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false))); assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits(), equalTo((long) numDocs)); assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l)); @@ -277,13 +277,13 @@ public class IndexStatsIT extends ESIntegTestCase { public void testNonThrottleStats() throws Exception { assertAcked(prepareCreate("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "merge") + 
.put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "10000") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10000") )); ensureGreen(); long termUpto = 0; @@ -309,15 +309,14 @@ public class IndexStatsIT extends ESIntegTestCase { public void testThrottleStats() throws Exception { assertAcked(prepareCreate("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "merge") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1") - .put("index.merge.policy.type", "tiered") - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC.name()) + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "1") + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC.name()) )); ensureGreen(); long termUpto = 0; diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 9c9cb01e597..18d56eee88f 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -306,7 +306,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_name", node4) + .put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_name", node4) )); assertFalse(client().admin().cluster().prepareHealth().setWaitForRelocatingShards(0).setWaitForGreenStatus().setWaitForNodes("5").get().isTimedOut()); @@ -328,7 +328,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { logger.debug("--> allowing index to be assigned to node [{}]", node4); assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings( Settings.builder() - .put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_name", "NONE"))); + .put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_name", "NONE"))); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), 
"all"))); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index b32cfef76b6..5aaed6b5d94 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -321,21 +321,23 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get(); assertThat(response.getIndexTemplates(), empty()); - client().admin().indices().preparePutTemplate("template_1") + try { + client().admin().indices().preparePutTemplate("template_1") .setTemplate("te*") .setSettings(Settings.builder().put("does_not_exist", "test")) .get(); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("unknown setting [index.does_not_exist]", ex.getMessage()); + } response = client().admin().indices().prepareGetTemplates().get(); - assertThat(response.getIndexTemplates(), hasSize(1)); - assertThat(response.getIndexTemplates().get(0).getSettings().getAsMap().size(), equalTo(1)); - assertThat(response.getIndexTemplates().get(0).getSettings().get("index.does_not_exist"), equalTo("test")); + assertEquals(0, response.getIndexTemplates().size()); createIndex("test"); - //the wrong setting has no effect but does get stored among the index settings GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getIndexToSettings().get("test").getAsMap().get("index.does_not_exist"), equalTo("test")); + assertNull(getSettingsResponse.getIndexToSettings().get("test").getAsMap().get("index.does_not_exist")); } public void testIndexTemplateWithAliases() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java index def34b3818f..c50c191f8f4 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.percolate.PercolateShardRequest; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.Index; @@ -57,8 +56,9 @@ import org.junit.Before; import org.mockito.Mockito; import java.util.Collections; -import java.util.Set; +import java.util.Map; +import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -77,15 +77,13 @@ public class PercolateDocumentParserTests extends ESTestCase { Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .build(), - Settings.EMPTY, Collections.emptyList() - ); + .build(), Settings.EMPTY); AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), 
Collections.emptyMap(), Collections.emptyMap());
        IndicesModule indicesModule = new IndicesModule();
        mapperService = new MapperService(indexSettings, analysisService, new SimilarityService(indexSettings, Collections.emptyMap()), indicesModule.getMapperRegistry(), () -> null);
-        Set<QueryParser> parsers = Collections.singleton(new TermQueryParser());
-        IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(indexSettings.getSettings(), parsers, new NamedWriteableRegistry());
+        Map<String, QueryParser<?>> parsers = singletonMap("term", new TermQueryParser());
+        IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(indexSettings.getSettings(), parsers);
        queryShardContext = new QueryShardContext(indexSettings, null, null, null, mapperService, null, null, indicesQueriesRegistry);
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java
index c32632bbf1d..b8ed2cc0e28 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java
@@ -202,7 +202,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
    }
    public void testSingleShardAggregations() throws Exception {
-        assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("SETTING_NUMBER_OF_SHARDS", 1))
+        assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))
                .addMapping("type", "field1", "type=string", "field2", "type=string"));
        ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java
index 49635abc8de..d9c784da2b1 100644
--- a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java
@@ -163,9 +163,7 @@ public class PercolatorServiceTests extends ESTestCase {
                Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
                        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
-                .build(),
-                Settings.EMPTY, Collections.emptyList()
-        );
+                .build(), Settings.EMPTY);
        return new PercolatorQueriesRegistry(
                new ShardId(index, 0),
                indexSettings,
diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
index 660f1015c3d..5d8605ab19e 100644
--- a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java
@@ -42,7 +42,7 @@ public class PluginsServiceTests extends ESTestCase {
        }
        @Override
        public Settings additionalSettings() {
-            return Settings.builder().put("foo.bar", "1").put(IndexModule.STORE_TYPE, IndexModule.Type.MMAPFS.getSettingsKey()).build();
+            return Settings.builder().put("foo.bar", "1").put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.MMAPFS.getSettingsKey()).build();
        }
    }
    public static class AdditionalSettingsPlugin2 extends Plugin {
@@ -90,12 +90,12 @@
        Settings settings = Settings.builder()
                .put("path.home", createTempDir())
                .put("my.setting", "test")
-                .put(IndexModule.STORE_TYPE, IndexModule.Type.SIMPLEFS.getSettingsKey()).build();
+
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.SIMPLEFS.getSettingsKey()).build(); PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class); Settings newSettings = service.updatedSettings(); assertEquals("test", newSettings.get("my.setting")); // previous settings still exist assertEquals("1", newSettings.get("foo.bar")); // added setting exists - assertEquals(IndexModule.Type.SIMPLEFS.getSettingsKey(), newSettings.get(IndexModule.STORE_TYPE)); // does not override pre existing settings + assertEquals(IndexModule.Type.SIMPLEFS.getSettingsKey(), newSettings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey())); // does not override pre existing settings } public void testAdditionalSettingsClash() { diff --git a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 8d33758ef26..3b61edf2923 100644 --- a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -141,7 +141,7 @@ public class FullRollingRestartIT extends ESIntegTestCase { * to relocating to the restarting node since all had 2 shards and now one node has nothing allocated. * We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen. */ - prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMinutes(1))).get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get(); for (int i = 0; i < 100; i++) { client().prepareIndex("test", "type1", Long.toString(i)) diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 438445d538a..e9349a97d7d 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -55,7 +55,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -108,7 +108,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 
1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -159,7 +159,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -230,7 +230,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); diff --git a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogSizeRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/SmallTranslogSizeRecoveryIT.java deleted file mode 100644 index a39849e0f1c..00000000000 --- a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogSizeRecoveryIT.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
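// [editor's note] SmallTranslogSizeRecoveryIT, deleted below, only re-ran SimpleRecoveryIT with
// "index.shard.recovery.translog_size" forced down to 3 bytes. That key is not a registered
// setting in the new model, so under strict validation the override itself would now be
// rejected rather than silently ignored; illustratively (assuming the key stays unregistered):
//
//     Settings.settingsBuilder().put("index.shard.recovery.translog_size", "3b").build();
//     // -> rejected as "unknown setting [index.shard.recovery.translog_size]" when applied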
- */ - -package org.elasticsearch.recovery; - -import org.elasticsearch.common.settings.Settings; - -/** - * - */ -public class SmallTranslogSizeRecoveryIT extends SimpleRecoveryIT { - - @Override - protected Settings recoverySettings() { - return Settings.settingsBuilder().put("index.shard.recovery.translog_size", "3b").build(); - } -} diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 15313095650..b75d9634f69 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -18,19 +18,29 @@ */ package org.elasticsearch.search; +import java.io.IOException; + import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.TermQueryParser; import org.elasticsearch.search.highlight.CustomHighlighter; import org.elasticsearch.search.highlight.Highlighter; import org.elasticsearch.search.highlight.PlainHighlighter; import org.elasticsearch.search.suggest.CustomSuggester; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester; + +import static org.hamcrest.Matchers.containsString; /** */ public class SearchModuleTests extends ModuleTestCase { public void testDoubleRegister() { - SearchModule module = new SearchModule(); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); try { module.registerHighlighter("fvh", PlainHighlighter.class); } catch (IllegalArgumentException e) { @@ -45,7 +55,7 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisterSuggester() { - SearchModule module = new SearchModule(); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); module.registerSuggester("custom", CustomSuggester.class); try { module.registerSuggester("custom", CustomSuggester.class); @@ -56,7 +66,7 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisterHighlighter() { - SearchModule module = new SearchModule(); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); module.registerHighlighter("custom", CustomHighlighter.class); try { module.registerHighlighter("custom", CustomHighlighter.class); @@ -65,4 +75,32 @@ public class SearchModuleTests extends ModuleTestCase { } assertMapMultiBinding(module, Highlighter.class, CustomHighlighter.class); } + + public void testRegisterQueryParserDuplicate() { + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + module.registerQueryParser(TermQueryParser::new); + try { + module.buildQueryParserRegistry(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("already registered for name [term] while trying to register [org.elasticsearch.index.")); + } + } + + static class FakeQueryParser implements QueryParser { + @Override + public String[] names() { + return new String[] {"fake-query-parser"}; + } + + @Override + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + return null; + } + + @Override + public QueryBuilder getBuilderPrototype() { + return null; 
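// [editor's note] Query parser registration moves off Guice multibindings (the IndicesModuleTests
// path deleted earlier in this section) onto SearchModule, which accepts a parser supplier and
// detects name clashes only when the registry is materialized. Condensed from
// testRegisterQueryParserDuplicate above:
//
//     SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
//     module.registerQueryParser(TermQueryParser::new); // "term" is already registered by default
//     module.buildQueryParserRegistry();                // throws IllegalArgumentException:
//                                                       // "... already registered for name [term] ..."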
+ } + } + } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index c76e3681eb8..57c4c8bc6fc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -25,17 +25,20 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.hamcrest.Matchers; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -57,6 +60,12 @@ import static org.hamcrest.core.IsNull.nullValue; */ @ESIntegTestCase.SuiteScopeTestCase public class GeoDistanceIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index 4fbbef8a58d..672447a6811 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -30,15 +30,18 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid.Bucket; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Random; @@ -53,6 +56,12 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ESIntegTestCase.SuiteScopeTestCase public class GeoHashGridIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); static ObjectIntMap expectedDocCountsForGeoHash = null; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 17e8fd35073..0cb799d2ac0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.shard.ShardId; @@ -118,7 +119,7 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase { IndexSearcher searcher = new IndexSearcher(directoryReader); IndexService indexService = createIndex("test"); - indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true, false); + indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), MapperService.MergeReason.MAPPING_UPDATE, false); SearchContext searchContext = createSearchContext(indexService); AggregationContext context = new AggregationContext(searchContext); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index e962e90830f..65d5fbafbea 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -109,7 +109,7 @@ public class PipelineAggregationHelperTests extends ESTestCase { * @param values Array of values to compute metric for * @param metric A metric builder which defines what kind of metric should be returned for the values */ - public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder metric) { + public static double calculateMetric(double[] values, ValuesSourceMetricsAggregationBuilder<?> metric) { if (metric instanceof MinBuilder) { double accumulator = Double.POSITIVE_INFINITY; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 90d4437fcea..6184cb9fd68 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -77,7 +77,7 @@ public class MovAvgIT extends ESIntegTestCase { static int period; static HoltWintersModel.SeasonalityType seasonalityType; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceMetricsAggregationBuilder metric; + static ValuesSourceMetricsAggregationBuilder<?> metric; static List<Double> mockHisto; static Map<String, ArrayList<Double>> testValues; @@ -864,7 +864,7 @@ public class MovAvgIT extends ESIntegTestCase { public void testHoltWintersNotEnoughData() { try { - SearchResponse response = client() + client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) @@ -1003,7 +1003,7 @@ public class MovAvgIT extends ESIntegTestCase { public void testBadModelParams() { try { - SearchResponse response = client() + client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) @@ -1248,7 +1248,7 @@ for (MovAvgModelBuilder builder : builders) { try { - SearchResponse response = client() + client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) @@ -1265,14 +1265,10 @@ // All good } } - - - - } - private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { + private void assertValidIterators(Iterator<?> expectedBucketIter, Iterator<?> expectedCountsIter, Iterator<?> expectedValuesIter) { if (!expectedBucketIter.hasNext()) { fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch"); } @@ -1355,7 +1351,7 @@ } } - private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) { + private ValuesSourceMetricsAggregationBuilder<?> randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java index aebd6a7e780..145587a4b27 100644 ---
a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java @@ -60,7 +60,7 @@ public class SerialDiffIT extends ESIntegTestCase { static int numBuckets; static int lag; static BucketHelpers.GapPolicy gapPolicy; - static ValuesSourceMetricsAggregationBuilder metric; + static ValuesSourceMetricsAggregationBuilder<?> metric; static List<Double> mockHisto; static Map<String, ArrayList<Double>> testValues; @@ -80,7 +80,7 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private ValuesSourceMetricsAggregationBuilder randomMetric(String name, String field) { + private ValuesSourceMetricsAggregationBuilder<?> randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { @@ -95,7 +95,7 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { + private void assertValidIterators(Iterator<?> expectedBucketIter, Iterator<?> expectedCountsIter, Iterator<?> expectedValuesIter) { if (!expectedBucketIter.hasNext()) { fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch"); } diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 1f2ef1a28d1..c45b04f7825 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -28,8 +28,10 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MockEngineFactoryPlugin; @@ -52,7 +54,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class); + return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class); } public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException { @@ -151,10 +153,16 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { public static class TestPlugin extends Plugin { + public static final Setting<Double> EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting<Double> EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); @Override public String name() { return "random-exception-reader-wrapper"; } + public void onModule(SettingsModule module) { + module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING); + module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING); + } @Override
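The hunk above shows the full life cycle of the new typed `Setting` infrastructure this change migrates tests to. As a minimal sketch, reusing the names from the test plugin above (the enclosing class is elided):

[source,java]
--------------------------------------------------
// a typed, index-scoped setting: key, default 0.1, minimum 0.0; the boolean
// (false here) appears to flag whether the setting is dynamically updatable
public static final Setting<Double> EXCEPTION_TOP_LEVEL_RATIO_SETTING =
        Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX);

// a plugin makes the setting known to the node by registering it
public void onModule(SettingsModule module) {
    module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING);
}

// consumers read the typed value (or its default) straight off a Settings instance,
// as done for ESIntegTestCase.INDEX_TEST_SEED_SETTING in the next hunk
double topLevelRatio = EXCEPTION_TOP_LEVEL_RATIO_SETTING.get(settings);
--------------------------------------------------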
public String description() { return "a mock reader wrapper that throws random exceptions for testing"; @@ -172,7 +180,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { private final double lowLevelRatio; ThrowingSubReaderWrapper(Settings settings) { - final long seed = settings.getAsLong(SETTING_INDEX_SEED, 0l); + final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); this.topLevelRatio = settings.getAsDouble(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d); this.lowLevelRatio = settings.getAsDouble(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d); this.random = new Random(seed); diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 1708d10f96f..a6b76359cff 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -102,15 +102,15 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase { client().admin().indices().prepareFlush("test").setWaitIfOngoing(true).execute().get(); client().admin().indices().prepareClose("test").execute().get(); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder() - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE, exceptionRate) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, exceptionOnOpenRate)); + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), exceptionRate) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate)); client().admin().indices().prepareOpen("test").execute().get(); } else { Settings.Builder settings = settingsBuilder() .put("index.number_of_replicas", randomIntBetween(0, 1)) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE, exceptionRate) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, exceptionOnOpenRate); // we cannot expect that the index will be valid + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), exceptionRate) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate); // we cannot expect that the index will be valid logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap()); client().admin().indices().prepareCreate("test") .setSettings(settings) @@ -184,8 +184,8 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase { // check the index still contains the records that we indexed without errors client().admin().indices().prepareClose("test").execute().get(); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder() - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE, 0) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0)); + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), 0) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), 0)); client().admin().indices().prepareOpen("test").execute().get(); ensureGreen(); SearchResponse searchResponse = client().prepareSearch().setTypes("type").setQuery(QueryBuilders.matchQuery("test", "init")).get(); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java 
b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index cbc7f93ff5a..3d3388b87b4 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -54,7 +54,7 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); - assertAcked(prepareCreate("test", 1, settingsBuilder().put("routing.hash.type", "simple"))); + assertAcked(prepareCreate("test", 1)); ensureYellow(); NumShards test = getNumShards("test"); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 6d239a8cdb0..eedc4d9e79f 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -82,8 +82,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { Set fullExpectedIds = new TreeSet<>(); Settings.Builder settingsBuilder = settingsBuilder() - .put(indexSettings()) - .put("routing.hash.type", "simple"); + .put(indexSettings()); if (numShards > 0) { settingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numShards); @@ -122,8 +121,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { public void testDfsQueryThenFetch() throws Exception { Settings.Builder settingsBuilder = settingsBuilder() - .put(indexSettings()) - .put("routing.hash.type", "simple"); + .put(indexSettings()); client().admin().indices().create(createIndexRequest("test") .settings(settingsBuilder)) .actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index d7ede712447..5a1b99fe05f 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -19,6 +19,11 @@ package org.elasticsearch.search.builder; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -44,9 +49,9 @@ import org.elasticsearch.index.query.EmptyQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.Script; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder.InnerHit; @@ -63,11 +68,6 @@ import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.AfterClass; import org.junit.BeforeClass; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - import static org.hamcrest.Matchers.equalTo; public class SearchSourceBuilderTests extends 
ESTestCase { @@ -83,26 +83,33 @@ public class SearchSourceBuilderTests extends ESTestCase { .put("name", SearchSourceBuilderTests.class.toString()) .put("path.home", createTempDir()) .build(); + namedWriteableRegistry = new NamedWriteableRegistry(); injector = new ModulesBuilder().add( new SettingsModule(settings, new SettingsFilter(settings)), new ThreadPoolModule(new ThreadPool(settings)), - new IndicesModule() { + new SearchModule(settings, namedWriteableRegistry) { @Override - public void configure() { - // skip services - bindQueryParsersExtension(); + protected void configureSearch() { + // skip me so we don't need transport + } + @Override + protected void configureAggs() { + // skip me so we don't need scripting + } + @Override + protected void configureSuggesters() { + // skip me so we don't need IndicesService } }, new AbstractModule() { @Override protected void configure() { Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); } } ).createInjector(); indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); } @AfterClass diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 45be05c10d8..1351d2ed742 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -96,8 +96,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) // aggressive filter caching so that we can assert on the filter cache size - .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE) - .put(IndexModule.QUERY_CACHE_EVERYTHING, true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) .build(); } diff --git a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java index b5f20e9357c..79bd729702a 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java @@ -33,9 +33,12 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Collection; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -44,10 +47,15 @@ import static org.hamcrest.Matchers.greaterThan; public class ParentFieldLoadingIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.merge.enabled + } + private final Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) 
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_REFRESH_INTERVAL, -1) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1) // We never want merges in this test to ensure we have two segments for the last validation .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) .build(); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index e4d44ec73af..695575d9d08 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -33,14 +33,17 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Locale; @@ -66,6 +69,11 @@ import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThan; public class DecayFunctionScoreIT extends ESIntegTestCase { + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + public void testDistanceScoreGeoLinGaussExp() throws Exception { assertAcked(prepareCreate("test").addMapping( "type1", diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java index b428d911dd5..d1d8278b79b 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.functionscore; +import java.util.Collection; + import org.apache.lucene.search.Explanation; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchResponse; @@ -36,8 +38,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import java.util.Collection; - import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -103,7 +103,7 @@ public class FunctionScorePluginIT extends ESIntegTestCase { } public void onModule(SearchModule scoreModule) { - scoreModule.registerFunctionScoreParser(FunctionScorePluginIT.CustomDistanceScoreParser.class); + scoreModule.registerFunctionScoreParser(new FunctionScorePluginIT.CustomDistanceScoreParser()); } } diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index b354edfc60e..14d620b9b99 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++
b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -26,10 +26,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.GeoValidationMethod; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import java.util.Collection; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; @@ -42,6 +46,11 @@ import static org.hamcrest.Matchers.equalTo; * */ public class GeoBoundingBoxIT extends ESIntegTestCase { + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + public void testSimpleBoundingBoxTest() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index e96c53fe943..ac8e9e029b5 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -52,8 +52,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.GeohashCellQuery; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.junit.BeforeClass; @@ -91,6 +93,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; */ public class GeoFilterIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + private static boolean intersectSupport; private static boolean disjointSupport; private static boolean withinSupport; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java index 69b93018d8d..4d097ec87ab 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -26,11 +26,14 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -44,6 +47,11 @@ import static
org.hamcrest.Matchers.equalTo; @ESIntegTestCase.SuiteScopeTestCase public class GeoPolygonIT extends ESIntegTestCase { + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + @Override protected void setupSuiteScopeCluster() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 2ac5895c9eb..383bde00b01 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -19,6 +19,13 @@ package org.elasticsearch.search.highlight; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; @@ -42,16 +49,13 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.IdsQueryParser; import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.TermQueryParser; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.highlight.HighlightBuilder.Field; import org.elasticsearch.search.highlight.HighlightBuilder.Order; import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; @@ -60,15 +64,6 @@ import org.elasticsearch.test.IndexSettingsModule; import org.junit.AfterClass; import org.junit.BeforeClass; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -84,12 +79,7 @@ public class HighlightBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - @SuppressWarnings("rawtypes") - Set<QueryParser> injectedQueryParsers = new HashSet<>(); - injectedQueryParsers.add(new MatchAllQueryParser()); - injectedQueryParsers.add(new IdsQueryParser()); - injectedQueryParsers.add(new TermQueryParser()); - indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); } @AfterClass diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index daa996be702..1e71b868236 100644 ---
a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -40,6 +40,7 @@ import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.util.ArrayList; import java.util.Collection; @@ -72,7 +73,7 @@ import static org.hamcrest.Matchers.nullValue; public class InnerHitsIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singleton(MockScriptEngine.TestPlugin.class); + return pluginList(MockScriptEngine.TestPlugin.class, InternalSettingsPlugin.class); } public void testSimpleNested() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 61890092831..fd9ee9a3f10 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -235,7 +235,7 @@ public class SimpleNestedIT extends ESIntegTestCase { // This IncludeNestedDocsQuery also needs to be aware of the filter from alias public void testDeleteNestedDocsWithAlias() throws Exception { assertAcked(prepareCreate("test") - .setSettings(settingsBuilder().put(indexSettings()).put("index.referesh_interval", -1).build()) + .setSettings(settingsBuilder().put(indexSettings()).put("index.refresh_interval", -1).build()) .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1") .field("type", "string") @@ -412,7 +412,7 @@ public class SimpleNestedIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .setSettings(settingsBuilder() .put(indexSettings()) - .put("index.referesh_interval", -1)) + .put("index.refresh_interval", -1)) .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("nested1") .field("type", "nested") diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java index 96f2e23bba1..20cf8596c4a 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -57,7 +57,7 @@ public class SearchScrollWithFailingNodesIT extends ESIntegTestCase { assertAcked( prepareCreate("test") // Enforces that only one shard can only be allocated to a single node - .setSettings(Settings.builder().put(indexSettings()).put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 1)) + .setSettings(Settings.builder().put(indexSettings()).put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 1)) ); List<IndexRequestBuilder> writes = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 27cc3d3cfb8..d14ea50838f 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import
org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.internal.DefaultSearchContext; @@ -40,6 +41,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; @@ -124,6 +126,16 @@ public class SimpleSearchIT extends ESIntegTestCase { refresh(); SearchResponse search = client().prepareSearch() + .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1"))) + .execute().actionGet(); + assertHitCount(search, 1L); + + search = client().prepareSearch() + .setQuery(queryStringQuery("ip: 192.168.0.1")) + .execute().actionGet(); + assertHitCount(search, 1L); + + search = client().prepareSearch() .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32"))) .execute().actionGet(); assertHitCount(search, 1l); @@ -282,54 +294,54 @@ public class SimpleSearchIT extends ESIntegTestCase { createIndex("idx"); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertWindowFails(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); - assertWindowFails(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1)); - assertWindowFails(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + assertWindowFails(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); + assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1)); + assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); } public void testLargeFromAndSizeSucceeds() throws Exception { createIndex("idx"); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW - 10).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW / 2) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW / 2 - 1).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) - 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2) + 
.setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2 - 1).get(), 1); } public void testTooLargeFromAndSizeOkBySetting() throws Exception { - prepareCreate("idx").setSettings(DefaultSearchContext.MAX_RESULT_WINDOW, DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 2).get(); + prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2).get(); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); } public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { createIndex("idx"); assertAcked(client().admin().indices().prepareUpdateSettings("idx") .setSettings( - Settings.builder().put(DefaultSearchContext.MAX_RESULT_WINDOW, DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 2)) + Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2)) .get()); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); } public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws Exception { - prepareCreate("idx").setSettings(DefaultSearchContext.MAX_RESULT_WINDOW, Integer.MAX_VALUE).get(); + prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE).get(); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10) - 
.setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); } public void testQueryNumericFieldWithRegex() throws Exception { @@ -350,7 +362,7 @@ public class SimpleSearchIT extends ESIntegTestCase { fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("Result window is too large, from + size must be less than or equal to: [" - + DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + + IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); assertThat(e.toString(), containsString("See the scroll api for a more efficient way to request large data sets")); } } diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index dcea25617b2..ad554b7628b 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -766,7 +766,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> update index settings to back to normal"); assertAcked(client().admin().indices().prepareUpdateSettings("test-*").setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "node") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "node") )); // Make sure that snapshot finished - doesn't matter if it failed or succeeded @@ -888,12 +888,11 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest } assertAcked(client().admin().indices().prepareUpdateSettings(name).setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "all") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, between(100, 50000)) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "all") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), between(100, 50000)) )); } - static { MetaData.registerPrototype(SnapshottableMetadata.TYPE, SnapshottableMetadata.PROTO); MetaData.registerPrototype(NonSnapshottableMetadata.TYPE, NonSnapshottableMetadata.PROTO); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index bd94a974a0f..4cbf436a743 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -80,8 +80,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static 
org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesExist; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesMissing; @@ -748,7 +748,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("location", randomRepoPath()))); logger.info("--> creating index that cannot be allocated"); - prepareCreate("test-idx", 2, Settings.builder().put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + ".tag", "nowhere").put("index.number_of_shards", 3)).get(); + prepareCreate("test-idx", 2, Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + ".tag", "nowhere").put("index.number_of_shards", 3)).get(); logger.info("--> snapshot"); CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get(); @@ -1524,8 +1524,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // Update settings to make sure that relocation is slow so we can start snapshot before relocation is finished assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "all") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, 100, ByteSizeUnit.BYTES) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "all") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), 100, ByteSizeUnit.BYTES) )); logger.info("--> start relocations"); @@ -1540,7 +1540,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // Update settings to back to normal assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "node") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none") )); logger.info("--> wait for snapshot to complete"); @@ -1627,7 +1627,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Settings.Builder indexSettings = Settings.builder() .put(indexSettings()) .put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)) - .put(INDEX_REFRESH_INTERVAL, "10s") + .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s") .put("index.analysis.analyzer.my_analyzer.type", "custom") .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") .putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym") @@ -1695,7 +1695,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> assert that correct settings are restored"); GetSettingsResponse getSettingsResponse = client.admin().indices().prepareGetSettings("test-idx").execute().actionGet(); - assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL), equalTo("5s")); + assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL_SETTING.getKey()), equalTo("5s")); // Make sure that number of shards didn't change assertThat(getSettingsResponse.getSetting("test-idx", SETTING_NUMBER_OF_SHARDS), equalTo("" + numberOfShards)); assertThat(getSettingsResponse.getSetting("test-idx", "index.analysis.analyzer.my_analyzer.type"), equalTo("standard")); @@ -1717,7 +1717,7 @@ public class SharedClusterSnapshotRestoreIT extends 
AbstractSnapshotIntegTestCas logger.info("--> assert that correct settings are restored and index is still functional"); getSettingsResponse = client.admin().indices().prepareGetSettings("test-idx").execute().actionGet(); - assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL), equalTo("5s")); + assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL_SETTING.getKey()), equalTo("5s")); // Make sure that number of shards didn't change assertThat(getSettingsResponse.getSetting("test-idx", SETTING_NUMBER_OF_SHARDS), equalTo("" + numberOfShards)); @@ -1739,7 +1739,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Settings.Builder indexSettings = Settings.builder() .put(indexSettings()) .put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)) - .put(INDEX_REFRESH_INTERVAL, "10s"); + .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s"); logger.info("--> create index"); assertAcked(prepareCreate("test-idx", 2, indexSettings)); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 7cf79cacf56..7c72014532b 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -47,6 +47,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.ArrayList; @@ -453,7 +454,9 @@ public class UpdateIT extends ESIntegTestCase { PutFieldValuesScriptPlugin.class, FieldIncrementScriptPlugin.class, ScriptedUpsertScriptPlugin.class, - ExtractContextInSourceScriptPlugin.class); + ExtractContextInSourceScriptPlugin.class, + InternalSettingsPlugin.class // uses index.merge.enabled + ); } private void createTestIndex() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java b/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java index 6c6c45e9cfd..fe36b749625 100644 --- a/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java +++ b/core/src/test/java/org/elasticsearch/watcher/ResourceWatcherServiceTests.java @@ -79,7 +79,7 @@ public class ResourceWatcherServiceTests extends ESTestCase { }; // checking default freq - WatcherHandle handle = service.add(watcher); + WatcherHandle handle = service.add(watcher); assertThat(handle, notNullValue()); assertThat(handle.frequency(), equalTo(ResourceWatcherService.Frequency.MEDIUM)); assertThat(service.lowMonitor.watchers.size(), is(0)); diff --git a/docs/java-api/docs/bulk.asciidoc b/docs/java-api/docs/bulk.asciidoc index 248326700c4..0b43b89c07c 100644 --- a/docs/java-api/docs/bulk.asciidoc +++ b/docs/java-api/docs/bulk.asciidoc @@ -127,5 +127,5 @@ bulkProcessor.close(); Both methods flush any remaining documents and disable all other scheduled flushes if they were scheduled by setting `flushInterval`. If concurrent requests were enabled the `awaitClose` method waits for up to the specified timeout for all bulk requests to complete then returns `true`, if the specified waiting time elapses before all bulk requests complete, -`false` is returned. The `close` method doesn't wait for any remaining bulk requests to complete and exists immediately. +`false` is returned. 
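In code, the shutdown sequence described here looks roughly as follows (a sketch assuming a configured `bulkProcessor`; `awaitClose` declares `InterruptedException`, which the caller must handle):

[source,java]
--------------------------------------------------
// wait up to 10 minutes for in-flight bulk requests to finish;
// returns false if the timeout elapses before they all complete
boolean terminated = bulkProcessor.awaitClose(10, TimeUnit.MINUTES);
if (terminated == false) {
    // some bulk requests may still be executing; log or retry as appropriate
}
--------------------------------------------------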
The `close` method doesn't wait for any remaining bulk requests to complete and exits immediately. diff --git a/docs/java-api/search.asciidoc b/docs/java-api/search.asciidoc index a8da951fa27..27ad0beac1c 100644 --- a/docs/java-api/search.asciidoc +++ b/docs/java-api/search.asciidoc @@ -65,7 +65,7 @@ do { //Handle the hit... } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); + scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(60000)).execute().actionGet(); } while(scrollResp.getHits().getHits().length != 0); // Zero hits mark the end of the scroll and the while loop. -------------------------------------------------- diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index 144d6f72548..bcef61d4ef7 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -131,9 +131,15 @@ the operating system: `os.cpu.percent`:: Recent CPU usage for the whole system, or -1 if not supported -`os.cpu.load_average`:: - Array of system load averages for the last one minute, five - minute and fifteen minutes (value of -1 indicates not supported) +`os.cpu.load_average.1m`:: + One-minute load average on the system (field is not present if + one-minute load average is not available) +`os.cpu.load_average.5m`:: + Five-minute load average on the system (field is not present if + five-minute load average is not available) +`os.cpu.load_average.15m`:: + Fifteen-minute load average on the system (field is not present if + fifteen-minute load average is not available) `os.mem.total_in_bytes`:: Total amount of physical memory in bytes diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc index cc4dffa612a..99e754df529 100644 --- a/docs/reference/cluster/reroute.asciidoc +++ b/docs/reference/cluster/reroute.asciidoc @@ -20,7 +20,7 @@ curl -XPOST 'localhost:9200/_cluster/reroute' -d '{ } }, { - "allocate" : { + "allocate_replica" : { "index" : "test", "shard" : 1, "node" : "node3" } } @@ -64,14 +64,42 @@ The commands supported are: from the primary shard by cancelling them and allowing them to be reinitialized through the standard reallocation process. -`allocate`:: - Allocate an unassigned shard to a node. Accepts the +`allocate_replica`:: + Allocate an unassigned replica shard to a node. Accepts the `index` and `shard` for index name and shard number, and `node` to - allocate the shard to. It also accepts `allow_primary` flag to - explicitly specify that it is allowed to explicitly allocate a primary - shard (might result in data loss). + allocate the shard to. Takes <> into account. -WARNING: The `allow_primary` parameter will force a new _empty_ primary shard -to be allocated *without any data*. If a node which has a copy of the original -primary shard (including data) rejoins the cluster later on, that data will be -deleted: the old shard copy will be replaced by the new live shard copy. +Two more commands are available that allow the allocation of a primary shard +to a node. These commands should however be used with extreme care, as primary +shard allocation is usually fully automatically handled by Elasticsearch. +Reasons why a primary shard cannot be automatically allocated include the following: + +- A new index was created but there is no node which satisfies the allocation deciders. 
+- An up-to-date shard copy of the data cannot be found on the current data nodes in +the cluster. To prevent data loss, the system does not automatically promote a stale +shard copy to primary. + +As a manual override, two commands to forcefully allocate primary shards +are available: + +`allocate_stale_primary`:: + Allocate a primary shard to a node that holds a stale copy. Accepts the + `index` and `shard` for index name and shard number, and `node` to + allocate the shard to. Using this command may lead to data loss + for the provided shard id. If a node which has the good copy of the + data rejoins the cluster later on, that data will be overwritten with + the data of the stale copy that was forcefully allocated with this + command. To ensure that these implications are well-understood, + this command requires the special field `accept_data_loss` to be + explicitly set to `true` for it to work. + +`allocate_empty_primary`:: + Allocate an empty primary shard to a node. Accepts the + `index` and `shard` for index name and shard number, and `node` to + allocate the shard to. Using this command leads to a complete loss + of all data that was indexed into this shard, if it was previously + started. If a node which has a copy of the + data rejoins the cluster later on, that data will be deleted! + To ensure that these implications are well-understood, + this command requires the special field `accept_data_loss` to be + explicitly set to `true` for it to work. diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc index 1499aab5909..0d589adb64a 100644 --- a/docs/reference/indices/shadow-replicas.asciidoc +++ b/docs/reference/indices/shadow-replicas.asciidoc @@ -58,9 +58,9 @@ curl -XPUT 'localhost:9200/my_index' -d ' [WARNING] ======================== In the above example, the "/opt/data/my_index" path is a shared filesystem that -must be available on every data node in the Elasticsearch cluster. You must -also ensure that the Elasticsearch process has the correct permissions to read -from and write to the directory used in the `index.data_path` setting. +must be available on every node in the Elasticsearch cluster. You must also +ensure that the Elasticsearch process has the correct permissions to read from +and write to the directory used in the `index.data_path` setting. ======================== An index that has been created with the `index.shadow_replicas` setting set to diff --git a/docs/reference/mapping/params/doc-values.asciidoc b/docs/reference/mapping/params/doc-values.asciidoc index a0108b899b9..81f9b6e3c64 100644 --- a/docs/reference/mapping/params/doc-values.asciidoc +++ b/docs/reference/mapping/params/doc-values.asciidoc @@ -7,7 +7,7 @@ unique sorted list of terms, and from that immediately have access to the list of documents that contain the term. Sorting, aggregations, and access to field values in scripts requires a -different data access pattern. Instead of lookup up the term and finding +different data access pattern. Instead of looking up the term and finding documents, we need to be able to look up the document and find the terms that it has in a field. diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index e13b94c7773..ed0bb47e9d4 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -199,3 +199,10 @@ phase. 
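The doc-values change above contrasts the two access patterns; a toy illustration with plain Java collections (not Elasticsearch data structures) makes the difference concrete:

[source,java]
--------------------------------------------------
// inverted-index orientation: term -> documents containing it (good for search)
Map<String, List<Integer>> invertedIndex = new HashMap<>();
invertedIndex.put("brown", Arrays.asList(1, 3));

// doc-values orientation: document -> terms of one of its fields
// (good for sorting, aggregations and script access to field values)
Map<Integer, List<String>> docValues = new HashMap<>();
docValues.put(1, Arrays.asList("brown", "fox"));
--------------------------------------------------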
Instead, highlighting needs to be performed via ============================================= + +==== Limiting the number of `nested` fields + +Indexing a document with 100 nested fields actually indexes 101 documents as each nested +document is indexed as a separate document. To safeguard against ill-defined mappings +the number of nested fields that can be defined per index has been limited to 50. This +default limit can be changed with the index setting `index.mapping.nested_fields.limit`. diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 56c000c3d9a..395b4311ec8 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -18,6 +18,8 @@ See <> for more info. -- include::migrate_3_0.asciidoc[] +include::migrate_2_3.asciidoc[] + include::migrate_2_2.asciidoc[] include::migrate_2_1.asciidoc[] diff --git a/docs/reference/migration/migrate_2_3.asciidoc b/docs/reference/migration/migrate_2_3.asciidoc new file mode 100644 index 00000000000..0d741e2adb2 --- /dev/null +++ b/docs/reference/migration/migrate_2_3.asciidoc @@ -0,0 +1,19 @@ +[[breaking-changes-2.3]] +== Breaking changes in 2.3 + +This section discusses the changes that you need to be aware of when migrating +your application to Elasticsearch 2.3. + +* <> + +[[breaking_23_index_apis]] +=== Mappings + +==== Limit to the number of `nested` fields + +Indexing a document with 100 nested fields actually indexes 101 documents as each nested +document is indexed as a separate document. To safeguard against ill-defined mappings +the number of nested fields that can be defined per index has been limited to 50. +This default limit can be changed with the index setting `index.mapping.nested_fields.limit`. +Note that the limit is only checked when new indices are created or mappings are updated. It +will thus only affect existing pre-2.3 indices if their mapping is changed. diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index bffca162ed1..2e622f1ca44 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -560,30 +560,30 @@ and high risk of being misused. The ability to change the thread pool type for a that it is still possible to adjust relevant thread pool parameters for each of the thread pools (e.g., depending on the thread pool type, `keep_alive`, `queue_size`, etc.). +[[breaking_30_cpu_stats]] === System CPU stats The recent CPU usage (as a percent) has been added to the OS stats reported under the node stats API and the cat nodes API. The breaking -change here is that there is a new object in the "os" object in the node -stats response. This object is called "cpu" and includes "percent" and -"load_average" as fields. This moves the "load_average" field that was -previously a top-level field in the "os" object to the "cpu" object. The -format of the "load_average" field has changed to an array of length -three representing the one-minute, five-minute and fifteen-minute load -averages (a value of -1 for any of array components indicates that the -corresponding metric is not available). +change here is that there is a new object in the `os` object in the node +stats response. This object is called `cpu` and includes "percent" and +`load_average` as fields. This moves the `load_average` field that was +previously a top-level field in the `os` object to the `cpu` object. 
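Returning to the `index.mapping.nested_fields.limit` setting documented in the two sections above, a test-style sketch of raising the limit at index creation (the index name is illustrative):

[source,java]
--------------------------------------------------
// allow up to 100 nested fields instead of the default 50
client().admin().indices().prepareCreate("my-index")
        .setSettings(Settings.builder()
                .put("index.mapping.nested_fields.limit", 100))
        .get();
--------------------------------------------------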
-In the cat nodes API response, the "cpu" field is output by default. The -previous "load" field has been removed and is replaced by "load_1m", -"load_5m", and "load_15m" which represent the one-minute, five-minute -and fifteen-minute loads respectively. These values are output by -default, and a value of -1 indicates that the corresponding metric is -not available. +In the cat nodes API response, the `cpu` field is output by default. The +previous `load` field has been removed and is replaced by `load_1m`, +`load_5m`, and `load_15m` which represent the one-minute, five-minute +and fifteen-minute loads respectively. The field will be null if the +corresponding value is not available. -Finally, the API for org.elasticsearch.monitor.os.OsStats has +Finally, the API for `org.elasticsearch.monitor.os.OsStats` has changed. The `getLoadAverage` method has been removed. The value for this can now be obtained from `OsStats.Cpu#getLoadAverage` but it is no -longer a double and is instead an object encapuslating the one-minute, +longer a double and is instead an object encapsulating the one-minute, five-minute and fifteen-minute load averages. Additionally, the recent CPU usage can be obtained from `OsStats.Cpu#getPercent`. @@ -603,6 +603,13 @@ Allocation IDs assign unique identifiers to shard copies. This allows the cluster to differentiate between multiple copies of the same data and track which shards have been active, so that after a cluster restart, shard copies containing only the most recent data can become primaries. +=== Reroute commands + +The reroute command `allocate` has been split into two distinct commands, `allocate_replica` and `allocate_empty_primary`. +This was done as we introduced a new `allocate_stale_primary` command. The new `allocate_replica` command corresponds to the +old `allocate` command with `allow_primary` set to false. The new `allocate_empty_primary` command corresponds to the old +`allocate` command with `allow_primary` set to true (see the sketch below). + ==== `index.shared_filesystem.recover_on_any_node` changes The behavior of `index.shared_filesystem.recover_on_any_node = true` has been changed. 
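To make the command split described above concrete, here is a sketch of the same replica allocation expressed against the old and new APIs; both are request bodies for `POST /_cluster/reroute`, and the index, shard, and node values are hypothetical:

[source,js]
--------------------------------------------------
// 2.x: generic allocate command with allow_primary
{ "commands" : [ {
    "allocate" : { "index" : "test", "shard" : 0, "node" : "node-1", "allow_primary" : false }
} ] }

// 3.0: dedicated allocate_replica command
{ "commands" : [ {
    "allocate_replica" : { "index" : "test", "shard" : 0, "node" : "node-1" }
} ] }
--------------------------------------------------

The `allow_primary: true` case maps to `allocate_empty_primary`, which additionally requires `accept_data_loss` to be set to `true`, as described in the cluster reroute documentation above.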
Previously, in the case where no diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java index 8dcefce1478..3da22bf8a43 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.script.expression; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class ExpressionRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(ExpressionPlugin.class); - } - public ExpressionRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 73ad6043f37..6f9b043d8bc 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -26,9 +26,6 @@ dependencies { compile 'org.codehaus.groovy:groovy:2.4.4:indy' } -compileJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked,-cast' -compileTestJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked,-cast' - integTest { cluster { systemProperty 'es.script.inline', 'on' diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 98ed5695973..44ee9cdaf54 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -23,6 +23,7 @@ import groovy.lang.Binding; import groovy.lang.GroovyClassLoader; import groovy.lang.GroovyCodeSource; import groovy.lang.Script; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.codehaus.groovy.ast.ClassCodeExpressionTransformer; @@ -182,6 +183,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri String fake = MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8))); // same logic as GroovyClassLoader.parseClass() but with a different codesource string: return AccessController.doPrivileged(new PrivilegedAction() { + @Override public Class run() { GroovyCodeSource gcs = new GroovyCodeSource(script, fake, BootstrapInfo.UNTRUSTED_CODEBASE); gcs.setCachable(false); @@ -203,7 +205,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri */ @SuppressWarnings("unchecked") private Script createScript(Object compiledScript, Map vars) throws InstantiationException, IllegalAccessException { - Class scriptClass = (Class) compiledScript; + Class scriptClass = (Class) compiledScript; Script scriptObject = (Script) scriptClass.newInstance(); Binding binding = new Binding(); binding.getVariables().putAll(vars); @@ -211,7 +213,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri return 
scriptObject; } - @SuppressWarnings({"unchecked"}) @Override public ExecutableScript executable(CompiledScript compiledScript, Map vars) { try { @@ -225,7 +226,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } } - @SuppressWarnings({"unchecked"}) @Override public SearchScript search(final CompiledScript compiledScript, final SearchLookup lookup, @Nullable final Map vars) { return new SearchScript() { @@ -288,7 +288,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } } - @SuppressWarnings({"unchecked"}) @Override public void setNextVar(String name, Object value) { variables.put(name, value); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 9b2e0041462..2a6be52f3f4 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -23,7 +23,6 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; @@ -35,13 +34,9 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; @@ -59,6 +54,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.script.groovy.GroovyScriptEngineService; +import org.elasticsearch.test.ActionRecordingPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; @@ -66,12 +62,9 @@ import org.elasticsearch.test.rest.client.http.HttpResponse; import org.junit.After; import org.junit.Before; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -89,7 +82,6 @@ import static org.hamcrest.Matchers.is; @ClusterScope(scope = SUITE) public class ContextAndHeaderTransportTests extends ESIntegTestCase { - private static final List requests = new CopyOnWriteArrayList<>(); private String randomHeaderKey = 
randomAsciiOfLength(10); private String randomHeaderValue = randomAsciiOfLength(20); private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -106,7 +98,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(ActionLoggingPlugin.class, GroovyPlugin.class); + return pluginList(ActionRecordingPlugin.class, GroovyPlugin.class); } @Before @@ -128,7 +120,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { .setSettings(settings).addMapping("type", mapping)); ensureGreen(queryIndex, lookupIndex); - requests.clear(); + ActionRecordingPlugin.clear(); } @After @@ -193,7 +185,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { .get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1); - assertThat(requests, hasSize(greaterThan(0))); + assertThat(ActionRecordingPlugin.allRequests(), hasSize(greaterThan(0))); assertGetRequestsContainHeaders(); } @@ -281,7 +273,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { .execute(); assertThat(response, hasStatus(OK)); - List searchRequests = getRequests(SearchRequest.class); + List searchRequests = ActionRecordingPlugin.requestsOfType(SearchRequest.class); assertThat(searchRequests, hasSize(greaterThan(0))); for (SearchRequest searchRequest : searchRequests) { assertThat(searchRequest.hasHeader(releventHeaderName), is(true)); @@ -290,20 +282,9 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } } - private List getRequests(Class clazz) { - List results = new ArrayList<>(); - for (ActionRequest request : requests) { - if (request.getClass().equals(clazz)) { - results.add((T) request); - } - } - - return results; - } - - private void assertRequestsContainHeader(Class clazz) { - List classRequests = getRequests(clazz); - for (ActionRequest request : classRequests) { + private void assertRequestsContainHeader(Class> clazz) { + List> classRequests = ActionRecordingPlugin.requestsOfType(clazz); + for (ActionRequest request : classRequests) { assertRequestContainsHeader(request); } } @@ -313,7 +294,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } private void assertGetRequestsContainHeaders(String index) { - List getRequests = getRequests(GetRequest.class); + List getRequests = ActionRecordingPlugin.requestsOfType(GetRequest.class); assertThat(getRequests, hasSize(greaterThan(0))); for (GetRequest request : getRequests) { @@ -324,7 +305,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } } - private void assertRequestContainsHeader(ActionRequest request) { + private void assertRequestContainsHeader(ActionRequest request) { String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; @@ -342,7 +323,9 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { Client transportClient = internalCluster().transportClient(); FilterClient filterClient = new FilterClient(transportClient) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, + ActionListener listener) { request.putHeader(randomHeaderKey, randomHeaderValue); 
super.doExecute(action, request, listener); } @@ -350,58 +333,4 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return filterClient; } - - public static class ActionLoggingPlugin extends Plugin { - - @Override - public String name() { - return "test-action-logging"; - } - - @Override - public String description() { - return "Test action logging"; - } - - @Override - public Collection nodeModules() { - return Collections.singletonList(new ActionLoggingModule()); - } - - public void onModule(ActionModule module) { - module.registerFilter(LoggingFilter.class); - } - } - - public static class ActionLoggingModule extends AbstractModule { - @Override - protected void configure() { - bind(LoggingFilter.class).asEagerSingleton(); - } - - } - - public static class LoggingFilter extends ActionFilter.Simple { - - @Inject - public LoggingFilter(Settings settings) { - super(settings); - } - - @Override - public int order() { - return 999; - } - - @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { - requests.add(request); - return true; - } - - @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { - return true; - } - } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java index d79beb186cf..f101303ee82 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java @@ -40,13 +40,13 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -72,7 +72,7 @@ import static org.hamcrest.Matchers.equalTo; public class GeoDistanceTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); } public void testSimpleDistance() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index a8f78c62c77..9b3e1a342a8 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -65,7 +66,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { return Collections.singleton(GroovyPlugin.class); } - private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); + 
private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); private static int cardinality; @@ -77,7 +78,6 @@ public class MinDocCountTests extends AbstractTermsTestCase { final List indexRequests = new ArrayList<>(); final Set stringTerms = new HashSet<>(); final LongSet longTerms = new LongHashSet(); - final Set dateTerms = new HashSet<>(); for (int i = 0; i < cardinality; ++i) { String stringTerm; do { @@ -319,7 +319,6 @@ public class MinDocCountTests extends AbstractTermsTestCase { throw ae; } } - } public void testHistogramCountAsc() throws Exception { @@ -372,11 +371,9 @@ public class MinDocCountTests extends AbstractTermsTestCase { .execute().actionGet(); assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); } - } private void testMinDocCountOnDateHistogram(Histogram.Order order) throws Exception { - final int interval = randomIntBetween(1, 3); final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type") .setSize(0) .setQuery(QUERY) @@ -393,7 +390,5 @@ public class MinDocCountTests extends AbstractTermsTestCase { .execute().actionGet(); assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); } - } - } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java index f3574982377..2c1d5256379 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java @@ -53,8 +53,8 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) // aggressive filter caching so that we can assert on the number of iterations of the script filters - .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE) - .put(IndexModule.QUERY_CACHE_EVERYTHING, true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) .build(); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java index 98d53c85174..7e6dbd67f56 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java @@ -147,15 +147,15 @@ public class ScriptedMetricTests extends ESIntegTestCase { for (Object object : aggregationList) { assertThat(object, notNullValue()); assertThat(object, instanceOf(Map.class)); - Map map = (Map) object; + Map map = (Map) object; assertThat(map.size(), lessThanOrEqualTo(1)); if (map.size() == 1) { - assertThat(map.get("count"), notNullValue()); - assertThat(map.get("count"), instanceOf(Number.class)); - assertThat((Number) map.get("count"), equalTo((Number) 1)); + assertThat(map.get("count"), notNullValue()); + assertThat(map.get("count"), instanceOf(Number.class)); + assertThat((Number) map.get("count"), equalTo((Number) 1)); numShardsRun++; + } } - } // We don't know how many shards will have documents but we need to make // sure that at least one shard ran the map script assertThat(numShardsRun, greaterThan(0)); @@ -740,6 +740,7 
@@ public class ScriptedMetricTests extends ESIntegTestCase { assertThat(scriptedMetric.getName(), equalTo("scripted")); assertThat(scriptedMetric.aggregation(), notNullValue()); assertThat(scriptedMetric.aggregation(), instanceOf(List.class)); + @SuppressWarnings("unchecked") // We'll just get a ClassCastException a couple lines down if we're wrong, its ok. List aggregationResult = (List) scriptedMetric.aggregation(); assertThat(aggregationResult.size(), equalTo(1)); assertThat(aggregationResult.get(0), equalTo(0)); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 5a56e0f6999..b78c1c264c3 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -339,9 +339,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .execute().actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) -.addScriptField("s_obj1", new Script("_source.obj1")) + SearchResponse response = client().prepareSearch().setQuery(matchAllQuery()).addScriptField("s_obj1", new Script("_source.obj1")) .addScriptField("s_obj1_test", new Script("_source.obj1.test")).addScriptField("s_obj2", new Script("_source.obj2")) .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2")).addScriptField("s_arr3", new Script("_source.arr3")) .execute().actionGet(); @@ -355,7 +353,7 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(response.getHits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something")); Map sObj2 = response.getHits().getAt(0).field("s_obj2").value(); - List sObj2Arr2 = (List) sObj2.get("arr2"); + List sObj2Arr2 = (List) sObj2.get("arr2"); assertThat(sObj2Arr2.size(), equalTo(2)); assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1")); assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2")); @@ -365,8 +363,8 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1")); assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2")); - List sObj2Arr3 = response.getHits().getAt(0).field("s_arr3").values(); - assertThat(((Map) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1")); + List sObj2Arr3 = response.getHits().getAt(0).field("s_arr3").values(); + assertThat(((Map) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1")); } public void testPartialFields() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index 8d959022412..bbc2d0789de 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.search.sort.ScriptSortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matchers; @@ -105,7 
+106,7 @@ import static org.hamcrest.Matchers.nullValue; public class SimpleSortTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); } @TestLogging("action.search.type:TRACE") diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java index b96436dd77f..b73ec250daf 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.script.groovy; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class GroovyRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(GroovyPlugin.class); - } - public GroovyRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index 4e8e9cc345d..694ddc74606 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -26,8 +26,6 @@ dependencies { compile "com.github.spullara.mustache.java:compiler:0.9.1" } -compileTestJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked' - integTest { cluster { systemProperty 'es.script.inline', 'on' diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 92d15332780..485e687b4a5 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.messy.tests; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; @@ -29,17 +28,12 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import 
org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -52,6 +46,7 @@ import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.test.ActionRecordingPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.junit.After; @@ -60,12 +55,10 @@ import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -84,7 +77,6 @@ import static org.hamcrest.Matchers.is; @ClusterScope(scope = SUITE) public class ContextAndHeaderTransportTests extends ESIntegTestCase { - private static final List requests = new CopyOnWriteArrayList<>(); private String randomHeaderKey = randomAsciiOfLength(10); private String randomHeaderValue = randomAsciiOfLength(20); private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -101,7 +93,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(ActionLoggingPlugin.class, MustachePlugin.class); + return pluginList(ActionRecordingPlugin.class, MustachePlugin.class); } @Before @@ -122,14 +114,13 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { assertAcked(transportClient().admin().indices().prepareCreate(queryIndex) .setSettings(settings).addMapping("type", mapping)); ensureGreen(queryIndex, lookupIndex); - - requests.clear(); } @After public void checkAllRequestsContainHeaders() { assertRequestsContainHeader(IndexRequest.class); assertRequestsContainHeader(RefreshRequest.class); + ActionRecordingPlugin.clear(); } public void testThatIndexedScriptGetRequestInTemplateQueryContainsContextAndHeaders() throws Exception { @@ -216,7 +207,6 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { titles.add("Representative Government"); titles.add("Election"); - List builders = new ArrayList<>(); for (String title: titles) { transportClient().prepareIndex("test", "type1").setSource("title", title).get(); } @@ -272,30 +262,15 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { assertRequestsContainHeader(PutIndexedScriptRequest.class); } - private List getRequests(Class clazz) { - List results = new ArrayList<>(); - for (ActionRequest request : requests) { - if (request.getClass().equals(clazz)) { - results.add((T) request); - } - } - - return results; - } - - private void assertRequestsContainHeader(Class clazz) { - List classRequests = getRequests(clazz); - for (ActionRequest request : classRequests) { + private void assertRequestsContainHeader(Class> clazz) { + List> classRequests = ActionRecordingPlugin.requestsOfType(clazz); + for (ActionRequest request : classRequests) { assertRequestContainsHeader(request); } } - private void assertGetRequestsContainHeaders() { - 
assertGetRequestsContainHeaders(this.lookupIndex); - } - private void assertGetRequestsContainHeaders(String index) { - List getRequests = getRequests(GetRequest.class); + List getRequests = ActionRecordingPlugin.requestsOfType(GetRequest.class); assertThat(getRequests, hasSize(greaterThan(0))); for (GetRequest request : getRequests) { @@ -306,7 +281,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } } - private void assertRequestContainsHeader(ActionRequest request) { + private void assertRequestContainsHeader(ActionRequest request) { String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; @@ -324,7 +299,9 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { Client transportClient = internalCluster().transportClient(); FilterClient filterClient = new FilterClient(transportClient) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, + ActionListener listener) { request.putHeader(randomHeaderKey, randomHeaderValue); super.doExecute(action, request, listener); } @@ -332,58 +309,4 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return filterClient; } - - public static class ActionLoggingPlugin extends Plugin { - - @Override - public String name() { - return "test-action-logging"; - } - - @Override - public String description() { - return "Test action logging"; - } - - @Override - public Collection nodeModules() { - return Collections.singletonList(new ActionLoggingModule()); - } - - public void onModule(ActionModule module) { - module.registerFilter(LoggingFilter.class); - } - } - - public static class ActionLoggingModule extends AbstractModule { - @Override - protected void configure() { - bind(LoggingFilter.class).asEagerSingleton(); - } - - } - - public static class LoggingFilter extends ActionFilter.Simple { - - @Inject - public LoggingFilter(Settings settings) { - super(settings); - } - - @Override - public int order() { - return 999; - } - - @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { - requests.add(request); - return true; - } - - @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { - return true; - } - } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 66c3376f04e..9d136807092 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -18,6 +18,10 @@ */ package org.elasticsearch.messy.tests; +import java.io.IOException; +import java.lang.reflect.Proxy; +import java.util.Collections; + import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.Accountable; @@ -31,6 +35,7 @@ import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.inject.multibindings.Multibinder; import 
org.elasticsearch.common.inject.util.Providers; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -60,17 +65,15 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.After; import org.junit.Before; -import java.io.IOException; -import java.lang.reflect.Proxy; -import java.util.Collections; - import static org.hamcrest.Matchers.containsString; /** @@ -100,15 +103,20 @@ public class TemplateQueryParserTests extends ESTestCase { ScriptModule scriptModule = new ScriptModule(settings); // TODO: make this use a mock engine instead of mustache and it will no longer be messy! scriptModule.addScriptEngine(MustacheScriptEngineService.class); + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), - new SettingsModule(settings, new SettingsFilter(settings)), + settingsModule, new ThreadPoolModule(new ThreadPool(settings)), - new IndicesModule() { + new SearchModule(settings, new NamedWriteableRegistry()) { @Override - public void configure() { - // skip services - bindQueryParsersExtension(); + protected void configureSearch() { + // skip so we don't need transport + } + @Override + protected void configureSuggesters() { + // skip so we don't need IndicesService } }, scriptModule, diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java index 0c489b3afb1..727d0c4316d 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.script.mustache; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MustacheRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MustachePlugin.class); - } - public MustacheRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java index bfb2b96a5ba..8da56d5a72b 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java +++ 
b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisICURestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisICUPlugin.class); - } - public AnalysisICURestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java index cb9435e430e..ae51e491d6b 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java @@ -21,23 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.kuromoji.AnalysisKuromojiPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisKuromojiRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisKuromojiPlugin.class); - } - - public AnalysisKuromojiRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 3adb8202833..63122842104 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.kuromoji.AnalysisKuromojiPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.io.InputStream; @@ -202,12 +203,13 @@ public class KuromojiAnalysisTests extends ESTestCase { .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - + final SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); Index index = new Index("test"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisKuromojiPlugin().onModule(analysisModule); - Injector 
parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java index 98380a07176..9d66bf24357 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.AnalysisPhoneticPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisPhoneticRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisPhoneticPlugin.class); - } - public AnalysisPhoneticRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index 0b6a4027685..b0a93f10e4d 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.AnalysisPhoneticPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -58,7 +59,9 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { Index index = new Index("test"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisPhoneticPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java index 2a76c21d353..16113b2b7ac 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java +++ 
b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisSmartChineseRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisSmartChinesePlugin.class); - } - public AnalysisSmartChineseRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index cfc2b28ec6a..55c0912b702 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -51,7 +52,9 @@ public class SimpleSmartChineseAnalysisTests extends ESTestCase { .build(); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisSmartChinesePlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java index c99ff75aebf..330ad87af74 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import 
org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisPolishRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisStempelPlugin.class); - } - public AnalysisPolishRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index f3ce4326afb..02fcbd0c369 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -56,7 +57,9 @@ public class PolishAnalysisTests extends ESTestCase { AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisStempelPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index a68f958580e..e091b0a0d92 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.io.StringReader; @@ -100,7 +101,9 @@ public class SimplePolishTokenFilterTests extends ESTestCase { private AnalysisService createAnalysisService(Index index, Settings settings) throws IOException { AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisStempelPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff 
--git a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java index 038f117b83f..9674d541354 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.plugin.deletebyquery.test.rest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.deletebyquery.DeleteByQueryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class DeleteByQueryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(DeleteByQueryPlugin.class); - } - public DeleteByQueryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index 2451672f1d1..ec4ef7cb625 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -56,8 +56,6 @@ dependencyLicenses { mapping from: /jaxb-.*/, to: 'jaxb' } -compileJava.options.compilerArgs << '-Xlint:-path,-unchecked' - thirdPartyAudit.excludes = [ // classes are missing 'javax.servlet.ServletContextEvent', diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java index 63888837cdd..131f73d1ca9 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.discovery.azure; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AzureDiscoveryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AzureDiscoveryPlugin.class); - } - public AzureDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java index 57021a085e6..24ccf82a3d8 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.cloud.aws; import 
com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class DiscoveryEc2RestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(Ec2DiscoveryPlugin.class); - } - public DiscoveryEc2RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 6f4459ef753..b888b817679 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -25,8 +25,6 @@ dependencyLicenses { mapping from: /google-.*/, to: 'google' } -compileJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked' - test { // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 5bb5e27ce64..496a1df8a9b 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -21,6 +21,7 @@ package org.elasticsearch.plugin.discovery.gce; import com.google.api.client.http.HttpHeaders; import com.google.api.client.util.ClassInfo; + import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.gce.GceComputeService; import org.elasticsearch.cloud.gce.GceModule; @@ -91,6 +92,7 @@ public class GceDiscoveryPlugin extends Plugin { } @Override + @SuppressWarnings("rawtypes") // Supertype uses raw type public Collection> nodeServices() { Collection> services = new ArrayList<>(); if (isDiscoveryAlive(settings, logger)) { diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java index 1a218394b7d..891dd156aac 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.discovery.gce; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class DiscoveryGCERestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(GceDiscoveryPlugin.class); - } - public DiscoveryGCERestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git 
a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java index d75c038a500..c6af20c011e 100644 --- a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java +++ b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.plugin.discovery.multicast; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MulticastDiscoveryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MulticastDiscoveryPlugin.class); - } - public MulticastDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java index 2e9039ed67f..74573a79289 100644 --- a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.plugin.example; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class JvmExampleRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(JvmExamplePlugin.class); - } - public JvmExampleRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java index 18f18372b90..8039715c3d1 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.script.javascript; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.javascript.JavaScriptPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class LangJavaScriptRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return 
pluginList(JavaScriptPlugin.class); - } - public LangJavaScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle index 810f0df4e16..f40bf4fd8e7 100644 --- a/plugins/lang-plan-a/build.gradle +++ b/plugins/lang-plan-a/build.gradle @@ -35,9 +35,6 @@ dependencyLicenses { mapping from: /asm-.*/, to: 'asm' } -compileJava.options.compilerArgs << '-Xlint:-cast,-rawtypes' -compileTestJava.options.compilerArgs << '-Xlint:-unchecked' - // regeneration logic, comes in via ant right now // don't port it to gradle, it works fine. diff --git a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 b/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 index 1b177a43381..851d924b33f 100644 --- a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 +++ b/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 @@ -34,18 +34,23 @@ statement | CONTINUE SEMICOLON? # continue | BREAK SEMICOLON? # break | RETURN expression SEMICOLON? # return - | TRY block ( CATCH LP ( TYPE ID ) RP block )+ # try + | TRY block trap+ # try | THROW expression SEMICOLON? # throw | expression SEMICOLON? # expr ; block - : LBRACK statement* RBRACK # multiple + : LBRACK statement+ RBRACK # multiple | statement # single ; empty - : SEMICOLON + : emptyscope + | SEMICOLON + ; + +emptyscope + : LBRACK RBRACK ; initializer @@ -69,6 +74,10 @@ declvar : ID ( ASSIGN expression )? ; +trap + : CATCH LP ( TYPE ID ) RP ( block | emptyscope ) + ; + expression : LP expression RP # precedence | ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java deleted file mode 100644 index 9788e63c3d7..00000000000 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plan.a; - -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.tree.ParseTree; - -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.plan.a.Definition.Cast; -import static org.elasticsearch.plan.a.Definition.Type; -import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; - -class Adapter { - static class StatementMetadata { - final ParserRuleContext source; - - boolean last; - - boolean allExit; - boolean allReturn; - boolean anyReturn; - boolean allBreak; - boolean anyBreak; - boolean allContinue; - boolean anyContinue; - - private StatementMetadata(final ParserRuleContext source) { - this.source = source; - - last = false; - - allExit = false; - allReturn = false; - anyReturn = false; - allBreak = false; - anyBreak = false; - allContinue = false; - anyContinue = false; - } - } - - static class ExpressionMetadata { - final ParserRuleContext source; - - boolean read; - boolean statement; - - Object preConst; - Object postConst; - boolean isNull; - - Type to; - Type from; - boolean explicit; - boolean typesafe; - - Cast cast; - - private ExpressionMetadata(final ParserRuleContext source) { - this.source = source; - - read = true; - statement = false; - - preConst = null; - postConst = null; - isNull = false; - - to = null; - from = null; - explicit = false; - typesafe = true; - - cast = null; - } - } - - static class ExternalMetadata { - final ParserRuleContext source; - - boolean read; - ParserRuleContext storeExpr; - int token; - boolean pre; - boolean post; - - int scope; - Type current; - boolean statik; - boolean statement; - Object constant; - - private ExternalMetadata(final ParserRuleContext source) { - this.source = source; - - read = false; - storeExpr = null; - token = 0; - pre = false; - post = false; - - scope = 0; - current = null; - statik = false; - statement = false; - constant = null; - } - } - - static class ExtNodeMetadata { - final ParserRuleContext parent; - final ParserRuleContext source; - - Object target; - boolean last; - - Type type; - Type promote; - - Cast castFrom; - Cast castTo; - - private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { - this.parent = parent; - this.source = source; - - target = null; - last = false; - - type = null; - promote = null; - - castFrom = null; - castTo = null; - } - } - - static String error(final ParserRuleContext ctx) { - return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; - } - - final Definition definition; - final String source; - final ParserRuleContext root; - final CompilerSettings settings; - - private final Map statementMetadata; - private final Map expressionMetadata; - private final Map externalMetadata; - private final Map extNodeMetadata; - - Adapter(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) { - this.definition = definition; - this.source = source; - this.root = root; - this.settings = settings; - - statementMetadata = new HashMap<>(); - expressionMetadata = new HashMap<>(); - externalMetadata = new HashMap<>(); - extNodeMetadata = new HashMap<>(); - } - - StatementMetadata createStatementMetadata(final ParserRuleContext source) { - final StatementMetadata sourcesmd = new StatementMetadata(source); - statementMetadata.put(source, sourcesmd); - - return sourcesmd; - } - - StatementMetadata 
getStatementMetadata(final ParserRuleContext source) { - final StatementMetadata sourcesmd = statementMetadata.get(source); - - if (sourcesmd == null) { - throw new IllegalStateException(error(source) + "Statement metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourcesmd; - } - - ExpressionContext updateExpressionTree(ExpressionContext source) { - if (source instanceof PrecedenceContext) { - final ParserRuleContext parent = source.getParent(); - int index = 0; - - for (final ParseTree child : parent.children) { - if (child == source) { - break; - } - - ++index; - } - - while (source instanceof PrecedenceContext) { - source = ((PrecedenceContext)source).expression(); - } - - parent.children.set(index, source); - } - - return source; - } - - ExpressionMetadata createExpressionMetadata(ParserRuleContext source) { - final ExpressionMetadata sourceemd = new ExpressionMetadata(source); - expressionMetadata.put(source, sourceemd); - - return sourceemd; - } - - ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { - final ExpressionMetadata sourceemd = expressionMetadata.get(source); - - if (sourceemd == null) { - throw new IllegalStateException(error(source) + "Expression metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourceemd; - } - - ExternalMetadata createExternalMetadata(final ParserRuleContext source) { - final ExternalMetadata sourceemd = new ExternalMetadata(source); - externalMetadata.put(source, sourceemd); - - return sourceemd; - } - - ExternalMetadata getExternalMetadata(final ParserRuleContext source) { - final ExternalMetadata sourceemd = externalMetadata.get(source); - - if (sourceemd == null) { - throw new IllegalStateException(error(source) + "External metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourceemd; - } - - ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { - final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source); - extNodeMetadata.put(source, sourceemd); - - return sourceemd; - } - - ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) { - final ExtNodeMetadata sourceemd = extNodeMetadata.get(source); - - if (sourceemd == null) { - throw new IllegalStateException(error(source) + "External metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourceemd; - } -} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java index eb2681cfba8..77769736bf1 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java @@ -20,6 +20,72 @@ package org.elasticsearch.plan.a; import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.plan.a.Definition.Cast; +import org.elasticsearch.plan.a.Definition.Constructor; +import org.elasticsearch.plan.a.Definition.Field; +import org.elasticsearch.plan.a.Definition.Method; +import org.elasticsearch.plan.a.Definition.Pair; +import org.elasticsearch.plan.a.Definition.Sort; +import org.elasticsearch.plan.a.Definition.Struct; +import org.elasticsearch.plan.a.Definition.Transform; +import org.elasticsearch.plan.a.Definition.Type; +import org.elasticsearch.plan.a.Metadata.ExpressionMetadata; +import 
org.elasticsearch.plan.a.Metadata.ExtNodeMetadata; +import org.elasticsearch.plan.a.Metadata.ExternalMetadata; +import org.elasticsearch.plan.a.Metadata.StatementMetadata; +import org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; +import org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; +import org.elasticsearch.plan.a.PlanAParser.AssignmentContext; +import org.elasticsearch.plan.a.PlanAParser.BinaryContext; +import org.elasticsearch.plan.a.PlanAParser.BlockContext; +import org.elasticsearch.plan.a.PlanAParser.BoolContext; +import org.elasticsearch.plan.a.PlanAParser.BreakContext; +import org.elasticsearch.plan.a.PlanAParser.CastContext; +import org.elasticsearch.plan.a.PlanAParser.CharContext; +import org.elasticsearch.plan.a.PlanAParser.CompContext; +import org.elasticsearch.plan.a.PlanAParser.ConditionalContext; +import org.elasticsearch.plan.a.PlanAParser.ContinueContext; +import org.elasticsearch.plan.a.PlanAParser.DeclContext; +import org.elasticsearch.plan.a.PlanAParser.DeclarationContext; +import org.elasticsearch.plan.a.PlanAParser.DecltypeContext; +import org.elasticsearch.plan.a.PlanAParser.DeclvarContext; +import org.elasticsearch.plan.a.PlanAParser.DoContext; +import org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import org.elasticsearch.plan.a.PlanAParser.ExprContext; +import org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; +import org.elasticsearch.plan.a.PlanAParser.ExtcallContext; +import org.elasticsearch.plan.a.PlanAParser.ExtcastContext; +import org.elasticsearch.plan.a.PlanAParser.ExtdotContext; +import org.elasticsearch.plan.a.PlanAParser.ExternalContext; +import org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; +import org.elasticsearch.plan.a.PlanAParser.ExtnewContext; +import org.elasticsearch.plan.a.PlanAParser.ExtprecContext; +import org.elasticsearch.plan.a.PlanAParser.ExtstartContext; +import org.elasticsearch.plan.a.PlanAParser.ExtstringContext; +import org.elasticsearch.plan.a.PlanAParser.ExttypeContext; +import org.elasticsearch.plan.a.PlanAParser.ExtvarContext; +import org.elasticsearch.plan.a.PlanAParser.FalseContext; +import org.elasticsearch.plan.a.PlanAParser.ForContext; +import org.elasticsearch.plan.a.PlanAParser.IfContext; +import org.elasticsearch.plan.a.PlanAParser.IncrementContext; +import org.elasticsearch.plan.a.PlanAParser.InitializerContext; +import org.elasticsearch.plan.a.PlanAParser.MultipleContext; +import org.elasticsearch.plan.a.PlanAParser.NullContext; +import org.elasticsearch.plan.a.PlanAParser.NumericContext; +import org.elasticsearch.plan.a.PlanAParser.PostincContext; +import org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import org.elasticsearch.plan.a.PlanAParser.PreincContext; +import org.elasticsearch.plan.a.PlanAParser.ReturnContext; +import org.elasticsearch.plan.a.PlanAParser.SingleContext; +import org.elasticsearch.plan.a.PlanAParser.SourceContext; +import org.elasticsearch.plan.a.PlanAParser.StatementContext; +import org.elasticsearch.plan.a.PlanAParser.ThrowContext; +import org.elasticsearch.plan.a.PlanAParser.TrapContext; +import org.elasticsearch.plan.a.PlanAParser.TrueContext; +import org.elasticsearch.plan.a.PlanAParser.TryContext; +import org.elasticsearch.plan.a.PlanAParser.UnaryContext; +import org.elasticsearch.plan.a.PlanAParser.WhileContext; import java.util.ArrayDeque; import java.util.Arrays; @@ -28,81 +94,18 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static 
org.elasticsearch.plan.a.Adapter.ExpressionMetadata; -import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; -import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; -import static org.elasticsearch.plan.a.Adapter.StatementMetadata; -import static org.elasticsearch.plan.a.Adapter.error; -import static org.elasticsearch.plan.a.Definition.Cast; -import static org.elasticsearch.plan.a.Definition.Constructor; -import static org.elasticsearch.plan.a.Definition.Field; -import static org.elasticsearch.plan.a.Definition.Method; -import static org.elasticsearch.plan.a.Definition.Pair; -import static org.elasticsearch.plan.a.Definition.Sort; -import static org.elasticsearch.plan.a.Definition.Struct; -import static org.elasticsearch.plan.a.Definition.Transform; -import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.Metadata.error; import static org.elasticsearch.plan.a.PlanAParser.ADD; -import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; -import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; -import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; import static org.elasticsearch.plan.a.PlanAParser.BWAND; import static org.elasticsearch.plan.a.PlanAParser.BWOR; import static org.elasticsearch.plan.a.PlanAParser.BWXOR; -import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; -import static org.elasticsearch.plan.a.PlanAParser.BlockContext; -import static org.elasticsearch.plan.a.PlanAParser.BoolContext; -import static org.elasticsearch.plan.a.PlanAParser.BreakContext; -import static org.elasticsearch.plan.a.PlanAParser.CastContext; -import static org.elasticsearch.plan.a.PlanAParser.CharContext; -import static org.elasticsearch.plan.a.PlanAParser.CompContext; -import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; -import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; import static org.elasticsearch.plan.a.PlanAParser.DIV; -import static org.elasticsearch.plan.a.PlanAParser.DeclContext; -import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; -import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; -import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; -import static org.elasticsearch.plan.a.PlanAParser.DoContext; -import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; -import static org.elasticsearch.plan.a.PlanAParser.ExprContext; -import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtcallContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtdotContext; -import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtstringContext; -import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; -import static org.elasticsearch.plan.a.PlanAParser.FalseContext; -import static org.elasticsearch.plan.a.PlanAParser.ForContext; -import static org.elasticsearch.plan.a.PlanAParser.IfContext; -import 
static org.elasticsearch.plan.a.PlanAParser.IncrementContext; -import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; import static org.elasticsearch.plan.a.PlanAParser.LSH; import static org.elasticsearch.plan.a.PlanAParser.MUL; -import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; -import static org.elasticsearch.plan.a.PlanAParser.NullContext; -import static org.elasticsearch.plan.a.PlanAParser.NumericContext; -import static org.elasticsearch.plan.a.PlanAParser.PostincContext; -import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; -import static org.elasticsearch.plan.a.PlanAParser.PreincContext; import static org.elasticsearch.plan.a.PlanAParser.REM; import static org.elasticsearch.plan.a.PlanAParser.RSH; -import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; import static org.elasticsearch.plan.a.PlanAParser.SUB; -import static org.elasticsearch.plan.a.PlanAParser.SingleContext; -import static org.elasticsearch.plan.a.PlanAParser.SourceContext; -import static org.elasticsearch.plan.a.PlanAParser.StatementContext; -import static org.elasticsearch.plan.a.PlanAParser.TrueContext; import static org.elasticsearch.plan.a.PlanAParser.USH; -import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; -import static org.elasticsearch.plan.a.PlanAParser.WhileContext; class Analyzer extends PlanAParserBaseVisitor { private static class Variable { @@ -117,31 +120,30 @@ class Analyzer extends PlanAParserBaseVisitor { } } - static void analyze(final Adapter adapter) { - new Analyzer(adapter); + static void analyze(final Metadata metadata) { + new Analyzer(metadata); } - private final Adapter adapter; + private final Metadata metadata; private final Definition definition; private final CompilerSettings settings; - private final Deque scopes; - private final Deque variables; + private final Deque scopes = new ArrayDeque<>(); + private final Deque variables = new ArrayDeque<>(); - private Analyzer(final Adapter adapter) { - this.adapter = adapter; - definition = adapter.definition; - settings = adapter.settings; - - scopes = new ArrayDeque<>(); - variables = new ArrayDeque<>(); + private Analyzer(final Metadata metadata) { + this.metadata = metadata; + definition = metadata.definition; + settings = metadata.settings; incrementScope(); - addVariable(null, "this", definition.execType); - addVariable(null, "input", definition.smapType); + addVariable(null, "#this", definition.execType); + metadata.inputValueSlot = addVariable(null, "input", definition.smapType).slot; + metadata.scoreValueSlot = addVariable(null, "_score", definition.floatType).slot; + metadata.loopCounterSlot = addVariable(null, "#loop", definition.intType).slot; - adapter.createStatementMetadata(adapter.root); - visit(adapter.root); + metadata.createStatementMetadata(metadata.root); + visit(metadata.root); decrementScope(); } @@ -179,7 +181,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException("Argument name [" + name + "] already defined within the scope."); } else { throw new IllegalArgumentException( - error(source) + "Variable name [" + name + "] already defined within the scope."); + error(source) + "Variable name [" + name + "] already defined within the scope."); } } @@ -201,34 +203,24 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitSource(final SourceContext ctx) { - final StatementMetadata sourcesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata sourcesmd = 
metadata.getStatementMetadata(ctx); final List statectxs = ctx.statement(); final StatementContext lastctx = statectxs.get(statectxs.size() - 1); incrementScope(); for (final StatementContext statectx : statectxs) { - if (sourcesmd.allExit) { + if (sourcesmd.allLast) { throw new IllegalArgumentException(error(statectx) + - "Statement will never be executed because all prior paths exit."); + "Statement will never be executed because all prior paths escape."); } - final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); - statesmd.last = statectx == lastctx; + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = statectx == lastctx; visit(statectx); - if (statesmd.anyContinue) { - throw new IllegalArgumentException(error(statectx) + - "Cannot have a continue statement outside of a loop."); - } - - if (statesmd.anyBreak) { - throw new IllegalArgumentException(error(statectx) + - "Cannot have a break statement outside of a loop."); - } - - sourcesmd.allExit = statesmd.allExit; - sourcesmd.allReturn = statesmd.allReturn; + sourcesmd.methodEscape = statesmd.methodEscape; + sourcesmd.allLast = statesmd.allLast; } decrementScope(); @@ -238,12 +230,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitIf(final IfContext ctx) { - final StatementMetadata ifsmd = adapter.getStatementMetadata(ctx); + final StatementMetadata ifsmd = metadata.getStatementMetadata(ctx); - incrementScope(); - - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); @@ -253,84 +243,85 @@ class Analyzer extends PlanAParserBaseVisitor { } final BlockContext blockctx0 = ctx.block(0); - final StatementMetadata blocksmd0 = adapter.createStatementMetadata(blockctx0); - blocksmd0.last = ifsmd.last; + final StatementMetadata blocksmd0 = metadata.createStatementMetadata(blockctx0); + blocksmd0.lastSource = ifsmd.lastSource; + blocksmd0.inLoop = ifsmd.inLoop; + blocksmd0.lastLoop = ifsmd.lastLoop; + incrementScope(); visit(blockctx0); + decrementScope(); - ifsmd.anyReturn = blocksmd0.anyReturn; - ifsmd.anyBreak = blocksmd0.anyBreak; ifsmd.anyContinue = blocksmd0.anyContinue; + ifsmd.anyBreak = blocksmd0.anyBreak; + + ifsmd.count = blocksmd0.count; if (ctx.ELSE() != null) { final BlockContext blockctx1 = ctx.block(1); - final StatementMetadata blocksmd1 = adapter.createStatementMetadata(blockctx1); - blocksmd1.last = ifsmd.last; + final StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1); + blocksmd1.lastSource = ifsmd.lastSource; + incrementScope(); visit(blockctx1); + decrementScope(); - ifsmd.allExit = blocksmd0.allExit && blocksmd1.allExit; - ifsmd.allReturn = blocksmd0.allReturn && blocksmd1.allReturn; - ifsmd.anyReturn |= blocksmd1.anyReturn; - ifsmd.allBreak = blocksmd0.allBreak && blocksmd1.allBreak; - ifsmd.anyBreak |= blocksmd1.anyBreak; - ifsmd.allContinue = blocksmd0.allContinue && blocksmd1.allContinue; + ifsmd.methodEscape = blocksmd0.methodEscape && blocksmd1.methodEscape; + ifsmd.loopEscape = blocksmd0.loopEscape && blocksmd1.loopEscape; + ifsmd.allLast = blocksmd0.allLast && blocksmd1.allLast; ifsmd.anyContinue |= blocksmd1.anyContinue; - } + ifsmd.anyBreak |= 
blocksmd1.anyBreak; - decrementScope(); + ifsmd.count = Math.max(ifsmd.count, blocksmd1.count); + } return null; } @Override public Void visitWhile(final WhileContext ctx) { - final StatementMetadata whilesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata whilesmd = metadata.getStatementMetadata(ctx); incrementScope(); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); - boolean exitrequired = false; + boolean continuous = false; if (expremd.postConst != null) { - boolean constant = (boolean)expremd.postConst; + continuous = (boolean)expremd.postConst; - if (!constant) { + if (!continuous) { throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); } - exitrequired = true; + if (ctx.empty() != null) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } } final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; visit(blockctx); - if (blocksmd.allReturn) { - throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (blocksmd.allBreak) { - throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + if (continuous && !blocksmd.anyBreak) { + whilesmd.methodEscape = true; + whilesmd.allLast = true; } - - if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); - } - - if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { - whilesmd.allExit = true; - whilesmd.allReturn = true; - } - } else if (exitrequired) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); } + whilesmd.count = 1; + decrementScope(); return null; @@ -338,49 +329,41 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDo(final DoContext ctx) { - final StatementMetadata dosmd = adapter.getStatementMetadata(ctx); + final StatementMetadata dosmd = metadata.getStatementMetadata(ctx); incrementScope(); final BlockContext blockctx = ctx.block(); - final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; visit(blockctx); - if (blocksmd.allReturn) { - throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (blocksmd.allBreak) { - throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); - } - - if (blocksmd.allContinue) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); - } - - final 
ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); if (expremd.postConst != null) { - final boolean exitrequired = (boolean)expremd.postConst; + final boolean continuous = (boolean)expremd.postConst; - if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + if (!continuous) { + throw new IllegalArgumentException(error(ctx) + "The loop is not necessary because it is only executed once."); } - if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { - dosmd.allExit = true; - dosmd.allReturn = true; - } - - if (!exitrequired && !blocksmd.anyContinue) { - throw new IllegalArgumentException(error(ctx) + "All paths exit so the loop is not necessary."); + if (!blocksmd.anyBreak) { + dosmd.methodEscape = true; + dosmd.allLast = true; } } + dosmd.count = 1; + decrementScope(); return null; @@ -388,72 +371,68 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitFor(final ForContext ctx) { - final StatementMetadata forsmd = adapter.getStatementMetadata(ctx); - boolean exitrequired = false; + final StatementMetadata forsmd = metadata.getStatementMetadata(ctx); + boolean continuous = false; incrementScope(); final InitializerContext initctx = ctx.initializer(); if (initctx != null) { - adapter.createStatementMetadata(initctx); + metadata.createStatementMetadata(initctx); visit(initctx); } - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); if (expremd.postConst != null) { - boolean constant = (boolean)expremd.postConst; + continuous = (boolean)expremd.postConst; - if (!constant) { + if (!continuous) { throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); } - exitrequired = true; + if (ctx.empty() != null) { + throw new IllegalArgumentException(error(ctx) + "The loop is continuous."); + } } } else { - exitrequired = true; + continuous = true; } final AfterthoughtContext atctx = ctx.afterthought(); if (atctx != null) { - adapter.createStatementMetadata(atctx); + metadata.createStatementMetadata(atctx); visit(atctx); } final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; visit(blockctx); - if (blocksmd.allReturn) { - throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (blocksmd.allBreak) { - throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + if (continuous && !blocksmd.anyBreak) { + 
forsmd.methodEscape = true; + forsmd.allLast = true; } - - if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); - } - - if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { - forsmd.allExit = true; - forsmd.allReturn = true; - } - } else if (exitrequired) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); } + forsmd.count = 1; + decrementScope(); return null; @@ -461,97 +440,185 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDecl(final DeclContext ctx) { + final StatementMetadata declsmd = metadata.getStatementMetadata(ctx); + final DeclarationContext declctx = ctx.declaration(); - adapter.createStatementMetadata(declctx); + metadata.createStatementMetadata(declctx); visit(declctx); + declsmd.count = 1; + return null; } @Override public Void visitContinue(final ContinueContext ctx) { - final StatementMetadata continuesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata continuesmd = metadata.getStatementMetadata(ctx); - continuesmd.allExit = true; - continuesmd.allContinue = true; + if (!continuesmd.inLoop) { + throw new IllegalArgumentException(error(ctx) + "Cannot have a continue statement outside of a loop."); + } + + if (continuesmd.lastLoop) { + throw new IllegalArgumentException(error(ctx) + "Unnecessary continue statement at the end of a loop."); + } + + continuesmd.allLast = true; continuesmd.anyContinue = true; + continuesmd.count = 1; + return null; } @Override public Void visitBreak(final BreakContext ctx) { - final StatementMetadata breaksmd = adapter.getStatementMetadata(ctx); + final StatementMetadata breaksmd = metadata.getStatementMetadata(ctx); - breaksmd.allExit = true; - breaksmd.allBreak = true; + if (!breaksmd.inLoop) { + throw new IllegalArgumentException(error(ctx) + "Cannot have a break statement outside of a loop."); + } + + breaksmd.loopEscape = true; + breaksmd.allLast = true; breaksmd.anyBreak = true; + breaksmd.count = 1; + return null; } @Override public Void visitReturn(final ReturnContext ctx) { - final StatementMetadata returnsmd = adapter.getStatementMetadata(ctx); + final StatementMetadata returnsmd = metadata.getStatementMetadata(ctx); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.objectType; visit(exprctx); markCast(expremd); - returnsmd.allExit = true; - returnsmd.allReturn = true; - returnsmd.anyReturn = true; + returnsmd.methodEscape = true; + returnsmd.loopEscape = true; + returnsmd.allLast = true; + + returnsmd.count = 1; + + return null; + } + + @Override + public Void visitTry(final TryContext ctx) { + final StatementMetadata trysmd = metadata.getStatementMetadata(ctx); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.lastSource = trysmd.lastSource; + blocksmd.inLoop = trysmd.inLoop; + blocksmd.lastLoop = trysmd.lastLoop; + incrementScope(); + visit(blockctx); + decrementScope(); + + trysmd.methodEscape = blocksmd.methodEscape; + trysmd.loopEscape = blocksmd.loopEscape; + trysmd.allLast = blocksmd.allLast; + trysmd.anyContinue = blocksmd.anyContinue; + trysmd.anyBreak = 
blocksmd.anyBreak; + + int trapcount = 0; + + for (final TrapContext trapctx : ctx.trap()) { + final StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx); + trapsmd.lastSource = trysmd.lastSource; + trapsmd.inLoop = trysmd.inLoop; + trapsmd.lastLoop = trysmd.lastLoop; + incrementScope(); + visit(trapctx); + decrementScope(); + + trysmd.methodEscape &= trapsmd.methodEscape; + trysmd.loopEscape &= trapsmd.loopEscape; + trysmd.allLast &= trapsmd.allLast; + trysmd.anyContinue |= trapsmd.anyContinue; + trysmd.anyBreak |= trapsmd.anyBreak; + + trapcount = Math.max(trapcount, trapsmd.count); + } + + trysmd.count = blocksmd.count + trapcount; + + return null; + } + + @Override + public Void visitThrow(final ThrowContext ctx) { + final StatementMetadata throwsmd = metadata.getStatementMetadata(ctx); + + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.exceptionType; + visit(exprctx); + markCast(expremd); + + throwsmd.methodEscape = true; + throwsmd.loopEscape = true; + throwsmd.allLast = true; + + throwsmd.count = 1; return null; } @Override public Void visitExpr(final ExprContext ctx) { - final StatementMetadata exprsmd = adapter.getStatementMetadata(ctx); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); - expremd.read = exprsmd.last; + final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.read = exprsmd.lastSource; visit(exprctx); - if (!expremd.statement && !exprsmd.last) { + if (!expremd.statement && !exprsmd.lastSource) { throw new IllegalArgumentException(error(ctx) + "Not a statement."); } - final boolean rtn = exprsmd.last && expremd.from.sort != Sort.VOID; - exprsmd.allExit = rtn; - exprsmd.allReturn = rtn; - exprsmd.anyReturn = rtn; + final boolean rtn = exprsmd.lastSource && expremd.from.sort != Sort.VOID; + exprsmd.methodEscape = rtn; + exprsmd.loopEscape = rtn; + exprsmd.allLast = rtn; expremd.to = rtn ? 
definition.objectType : expremd.from; markCast(expremd); + exprsmd.count = 1; + return null; } @Override public Void visitMultiple(final MultipleContext ctx) { - final StatementMetadata multiplesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx); final List statectxs = ctx.statement(); final StatementContext lastctx = statectxs.get(statectxs.size() - 1); for (StatementContext statectx : statectxs) { - if (multiplesmd.allExit) { + if (multiplesmd.allLast) { throw new IllegalArgumentException(error(statectx) + - "Statement will never be executed because all prior paths exit."); + "Statement will never be executed because all prior paths escape."); } - final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); - statesmd.last = multiplesmd.last && statectx == lastctx; + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = multiplesmd.lastSource && statectx == lastctx; + statesmd.inLoop = multiplesmd.inLoop; + statesmd.lastLoop = (multiplesmd.beginLoop || multiplesmd.lastLoop) && statectx == lastctx; visit(statectx); - multiplesmd.allExit = statesmd.allExit; - multiplesmd.allReturn = statesmd.allReturn && !statesmd.anyBreak && !statesmd.anyContinue; - multiplesmd.anyReturn |= statesmd.anyReturn; - multiplesmd.allBreak = !statesmd.anyReturn && statesmd.allBreak && !statesmd.anyContinue; - multiplesmd.anyBreak |= statesmd.anyBreak; - multiplesmd.allContinue = !statesmd.anyReturn && !statesmd.anyBreak && statesmd.allContinue; + multiplesmd.methodEscape = statesmd.methodEscape; + multiplesmd.loopEscape = statesmd.loopEscape; + multiplesmd.allLast = statesmd.allLast; multiplesmd.anyContinue |= statesmd.anyContinue; + multiplesmd.anyBreak |= statesmd.anyBreak; + + multiplesmd.count += statesmd.count; } return null; @@ -559,20 +626,22 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitSingle(final SingleContext ctx) { - final StatementMetadata singlesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata singlesmd = metadata.getStatementMetadata(ctx); final StatementContext statectx = ctx.statement(); - final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); - statesmd.last = singlesmd.last; + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = singlesmd.lastSource; + statesmd.inLoop = singlesmd.inLoop; + statesmd.lastLoop = singlesmd.beginLoop || singlesmd.lastLoop; visit(statectx); - singlesmd.allExit = statesmd.allExit; - singlesmd.allReturn = statesmd.allReturn; - singlesmd.anyReturn = statesmd.anyReturn; - singlesmd.allBreak = statesmd.allBreak; - singlesmd.anyBreak = statesmd.anyBreak; - singlesmd.allContinue = statesmd.allContinue; + singlesmd.methodEscape = statesmd.methodEscape; + singlesmd.loopEscape = statesmd.loopEscape; + singlesmd.allLast = statesmd.allLast; singlesmd.anyContinue = statesmd.anyContinue; + singlesmd.anyBreak = statesmd.anyBreak; + + singlesmd.count = statesmd.count; return null; } @@ -585,13 +654,13 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitInitializer(InitializerContext ctx) { final DeclarationContext declctx = ctx.declaration(); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (declctx != null) { - adapter.createStatementMetadata(declctx); + 
metadata.createStatementMetadata(declctx); visit(declctx); } else if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = false; visit(exprctx); @@ -600,7 +669,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (!expremd.statement) { throw new IllegalArgumentException(error(exprctx) + - "The intializer of a for loop must be a statement."); + "The initializer of a for loop must be a statement."); } else { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -611,10 +680,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitAfterthought(AfterthoughtContext ctx) { - ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = false; visit(exprctx); @@ -623,7 +692,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (!expremd.statement) { throw new IllegalArgumentException(error(exprctx) + - "The afterthought of a for loop must be a statement."); + "The afterthought of a for loop must be a statement."); } @@ -633,11 +702,11 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDeclaration(final DeclarationContext ctx) { final DecltypeContext decltypectx = ctx.decltype(); - final ExpressionMetadata decltypeemd = adapter.createExpressionMetadata(decltypectx); + final ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx); visit(decltypectx); for (final DeclvarContext declvarctx : ctx.declvar()) { - final ExpressionMetadata declvaremd = adapter.createExpressionMetadata(declvarctx); + final ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx); declvaremd.to = decltypeemd.from; visit(declvarctx); } @@ -647,7 +716,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDecltype(final DecltypeContext ctx) { - final ExpressionMetadata decltypeemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx); final String name = ctx.getText(); decltypeemd.from = definition.getType(name); @@ -657,15 +726,15 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDeclvar(final DeclvarContext ctx) { - final ExpressionMetadata declvaremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); final String name = ctx.ID().getText(); declvaremd.postConst = addVariable(ctx, name, declvaremd.to).slot; - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = declvaremd.to; visit(exprctx); markCast(expremd); @@ -674,6 +743,43 @@ class Analyzer extends PlanAParserBaseVisitor { return null; } + @Override + public Void visitTrap(final TrapContext ctx) { + final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + + final String type = ctx.TYPE().getText(); + trapsmd.exception = 
definition.getType(type); + + try { + trapsmd.exception.clazz.asSubclass(Exception.class); + } catch (final ClassCastException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "]."); + } + + final String id = ctx.ID().getText(); + trapsmd.slot = addVariable(ctx, id, trapsmd.exception).slot; + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.lastSource = trapsmd.lastSource; + blocksmd.inLoop = trapsmd.inLoop; + blocksmd.lastLoop = trapsmd.lastLoop; + visit(blockctx); + + trapsmd.methodEscape = blocksmd.methodEscape; + trapsmd.loopEscape = blocksmd.loopEscape; + trapsmd.allLast = blocksmd.allLast; + trapsmd.anyContinue = blocksmd.anyContinue; + trapsmd.anyBreak = blocksmd.anyBreak; + } else if (ctx.emptyscope() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + return null; + } + @Override public Void visitPrecedence(final PrecedenceContext ctx) { throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); @@ -681,7 +787,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitNumeric(final NumericContext ctx) { - final ExpressionMetadata numericemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; if (ctx.DECIMAL() != null) { @@ -770,7 +876,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitChar(final CharContext ctx) { - final ExpressionMetadata charemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); if (ctx.CHAR() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -784,7 +890,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitTrue(final TrueContext ctx) { - final ExpressionMetadata trueemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); if (ctx.TRUE() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -798,7 +904,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitFalse(final FalseContext ctx) { - final ExpressionMetadata falseemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); if (ctx.FALSE() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -812,7 +918,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitNull(final NullContext ctx) { - final ExpressionMetadata nullemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); if (ctx.NULL() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -835,10 +941,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExternal(final ExternalContext ctx) { - final ExpressionMetadata extemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata extemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = 
metadata.createExternalMetadata(extstartctx); extstartemd.read = extemd.read; visit(extstartctx); @@ -852,10 +958,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPostinc(final PostincContext ctx) { - final ExpressionMetadata postincemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata postincemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = postincemd.read; extstartemd.storeExpr = ctx.increment(); extstartemd.token = ADD; @@ -871,10 +977,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPreinc(final PreincContext ctx) { - final ExpressionMetadata preincemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata preincemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = preincemd.read; extstartemd.storeExpr = ctx.increment(); extstartemd.token = ADD; @@ -890,10 +996,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitUnary(final UnaryContext ctx) { - final ExpressionMetadata unaryemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); if (ctx.BOOLNOT() != null) { expremd.to = definition.booleanType; @@ -912,7 +1018,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (promote == null) { throw new ClassCastException("Cannot apply [" + ctx.getChild(0).getText() + "] " + - "operation to type [" + expremd.from.name + "]."); + "operation to type [" + expremd.from.name + "]."); } expremd.to = promote; @@ -981,17 +1087,17 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitCast(final CastContext ctx) { - final ExpressionMetadata castemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); final DecltypeContext decltypectx = ctx.decltype(); - final ExpressionMetadata decltypemd = adapter.createExpressionMetadata(decltypectx); + final ExpressionMetadata decltypemd = metadata.createExpressionMetadata(decltypectx); visit(decltypectx); final Type type = decltypemd.from; castemd.from = type; - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = type; expremd.explicit = true; visit(exprctx); @@ -1008,26 +1114,26 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBinary(final BinaryContext ctx) { - final ExpressionMetadata binaryemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); - final 
ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); visit(exprctx0); - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); visit(exprctx1); final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null; final boolean add = ctx.ADD() != null; final boolean xor = ctx.BWXOR() != null; final Type promote = add ? promoteAdd(expremd0.from, expremd1.from) : - xor ? promoteXor(expremd0.from, expremd1.from) : - promoteNumeric(expremd0.from, expremd1.from, decimal, true); + xor ? promoteXor(expremd0.from, expremd1.from) : + promoteNumeric(expremd0.from, expremd1.from, decimal, true); if (promote == null) { throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + - "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); } final Sort sort = promote.sort; @@ -1234,16 +1340,16 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitComp(final CompContext ctx) { - final ExpressionMetadata compemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); final boolean equality = ctx.EQ() != null || ctx.NE() != null; final boolean reference = ctx.EQR() != null || ctx.NER() != null; - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); visit(exprctx0); - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); visit(exprctx1); if (expremd0.isNull && expremd1.isNull) { @@ -1251,12 +1357,12 @@ class Analyzer extends PlanAParserBaseVisitor { } final Type promote = equality ? promoteEquality(expremd0.from, expremd1.from) : - reference ? promoteReference(expremd0.from, expremd1.from) : - promoteNumeric(expremd0.from, expremd1.from, true, true); + reference ? 
promoteReference(expremd0.from, expremd1.from) : + promoteNumeric(expremd0.from, expremd1.from, true, true); if (promote == null) { throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + - "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); } expremd0.to = promote; @@ -1356,16 +1462,16 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBool(final BoolContext ctx) { - final ExpressionMetadata boolemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); expremd0.to = definition.booleanType; visit(exprctx0); markCast(expremd0); - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); expremd1.to = definition.booleanType; visit(exprctx1); markCast(expremd1); @@ -1388,10 +1494,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitConditional(final ConditionalContext ctx) { - final ExpressionMetadata condemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); expremd0.to = definition.booleanType; visit(exprctx0); markCast(expremd0); @@ -1400,14 +1506,14 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException(error(ctx) + "Unnecessary conditional statement."); } - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); expremd1.to = condemd.to; expremd1.explicit = condemd.explicit; visit(exprctx1); - final ExpressionContext exprctx2 = adapter.updateExpressionTree(ctx.expression(2)); - final ExpressionMetadata expremd2 = adapter.createExpressionMetadata(exprctx2); + final ExpressionContext exprctx2 = metadata.updateExpressionTree(ctx.expression(2)); + final ExpressionMetadata expremd2 = metadata.createExpressionMetadata(exprctx2); expremd2.to = condemd.to; expremd2.explicit = condemd.explicit; visit(exprctx2); @@ -1432,13 +1538,13 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitAssignment(final AssignmentContext ctx) { - final ExpressionMetadata assignemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = 
ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = assignemd.read; - extstartemd.storeExpr = adapter.updateExpressionTree(ctx.expression()); + extstartemd.storeExpr = metadata.updateExpressionTree(ctx.expression()); if (ctx.AMUL() != null) { extstartemd.token = MUL; @@ -1483,22 +1589,22 @@ class Analyzer extends PlanAParserBaseVisitor { final ExtstringContext stringctx = ctx.extstring(); if (precctx != null) { - adapter.createExtNodeMetadata(ctx, precctx); + metadata.createExtNodeMetadata(ctx, precctx); visit(precctx); } else if (castctx != null) { - adapter.createExtNodeMetadata(ctx, castctx); + metadata.createExtNodeMetadata(ctx, castctx); visit(castctx); } else if (typectx != null) { - adapter.createExtNodeMetadata(ctx, typectx); + metadata.createExtNodeMetadata(ctx, typectx); visit(typectx); } else if (varctx != null) { - adapter.createExtNodeMetadata(ctx, varctx); + metadata.createExtNodeMetadata(ctx, varctx); visit(varctx); } else if (newctx != null) { - adapter.createExtNodeMetadata(ctx, newctx); + metadata.createExtNodeMetadata(ctx, newctx); visit(newctx); } else if (stringctx != null) { - adapter.createExtNodeMetadata(ctx, stringctx); + metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { throw new IllegalStateException(); @@ -1509,9 +1615,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtprec(final ExtprecContext ctx) { - final ExtNodeMetadata precenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = precenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1528,22 +1634,22 @@ class Analyzer extends PlanAParserBaseVisitor { } if (precctx != null) { - adapter.createExtNodeMetadata(parent, precctx); + metadata.createExtNodeMetadata(parent, precctx); visit(precctx); } else if (castctx != null) { - adapter.createExtNodeMetadata(parent, castctx); + metadata.createExtNodeMetadata(parent, castctx); visit(castctx); } else if (typectx != null) { - adapter.createExtNodeMetadata(parent, typectx); + metadata.createExtNodeMetadata(parent, typectx); visit(typectx); } else if (varctx != null) { - adapter.createExtNodeMetadata(parent, varctx); + metadata.createExtNodeMetadata(parent, varctx); visit(varctx); } else if (newctx != null) { - adapter.createExtNodeMetadata(parent, newctx); + metadata.createExtNodeMetadata(parent, newctx); visit(newctx); } else if (stringctx != null) { - adapter.createExtNodeMetadata(ctx, stringctx); + metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -1554,12 +1660,12 @@ class Analyzer extends PlanAParserBaseVisitor { if (dotctx != null) { --parentemd.scope; - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { --parentemd.scope; - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -1568,9 +1674,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtcast(final ExtcastContext ctx) { - final 
ExtNodeMetadata castenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = castenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1580,29 +1686,29 @@ class Analyzer extends PlanAParserBaseVisitor { final ExtstringContext stringctx = ctx.extstring(); if (precctx != null) { - adapter.createExtNodeMetadata(parent, precctx); + metadata.createExtNodeMetadata(parent, precctx); visit(precctx); } else if (castctx != null) { - adapter.createExtNodeMetadata(parent, castctx); + metadata.createExtNodeMetadata(parent, castctx); visit(castctx); } else if (typectx != null) { - adapter.createExtNodeMetadata(parent, typectx); + metadata.createExtNodeMetadata(parent, typectx); visit(typectx); } else if (varctx != null) { - adapter.createExtNodeMetadata(parent, varctx); + metadata.createExtNodeMetadata(parent, varctx); visit(varctx); } else if (newctx != null) { - adapter.createExtNodeMetadata(parent, newctx); + metadata.createExtNodeMetadata(parent, newctx); visit(newctx); } else if (stringctx != null) { - adapter.createExtNodeMetadata(ctx, stringctx); + metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); } final DecltypeContext declctx = ctx.decltype(); - final ExpressionMetadata declemd = adapter.createExpressionMetadata(declctx); + final ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx); visit(declctx); castenmd.castTo = getLegalCast(ctx, parentemd.current, declemd.from, true); @@ -1615,9 +1721,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtbrace(final ExtbraceContext ctx) { - final ExtNodeMetadata braceenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = braceenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final boolean array = parentemd.current.sort == Sort.ARRAY; final boolean def = parentemd.current.sort == Sort.DEF; @@ -1643,8 +1749,8 @@ class Analyzer extends PlanAParserBaseVisitor { braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); if (array || def) { expremd.to = array ? definition.intType : definition.objectType; @@ -1653,15 +1759,15 @@ class Analyzer extends PlanAParserBaseVisitor { braceenmd.target = "#brace"; braceenmd.type = def ? 
definition.defType : - definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); + definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); analyzeLoadStoreExternal(ctx); parentemd.current = braceenmd.type; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } } else { @@ -1680,16 +1786,16 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { throw new IllegalArgumentException(error(ctx) + - "Illegal map get shortcut for type [" + parentemd.current.name + "]."); + "Illegal map get shortcut for type [" + parentemd.current.name + "]."); } if (setter != null && setter.arguments.size() != 2) { throw new IllegalArgumentException(error(ctx) + - "Illegal map set shortcut for type [" + parentemd.current.name + "]."); + "Illegal map set shortcut for type [" + parentemd.current.name + "]."); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) - || !getter.rtn.equals(setter.arguments.get(1)))) { + || !getter.rtn.equals(setter.arguments.get(1)))) { throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); } @@ -1697,21 +1803,21 @@ class Analyzer extends PlanAParserBaseVisitor { settype = setter == null ? null : setter.arguments.get(1); } else if (list) { getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("add"); + setter = parentemd.current.struct.methods.get("set"); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.INT)) { + getter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list get shortcut for type [" + parentemd.current.name + "]."); + "Illegal list get shortcut for type [" + parentemd.current.name + "]."); } if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list set shortcut for type [" + parentemd.current.name + "]."); + "Illegal list set shortcut for type [" + parentemd.current.name + "]."); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) - || !getter.rtn.equals(setter.arguments.get(1)))) { + || !getter.rtn.equals(setter.arguments.get(1)))) { throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); } @@ -1735,7 +1841,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (braceenmd.target == null) { throw new IllegalArgumentException(error(ctx) + - "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); + "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); } return null; @@ -1743,17 +1849,17 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtdot(final ExtdotContext ctx) { - final ExtNodeMetadata dotemnd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = dotemnd.parent; final ExtcallContext callctx = ctx.extcall(); final ExtfieldContext fieldctx = ctx.extfield(); if (callctx != null) { - 
adapter.createExtNodeMetadata(parent, callctx); + metadata.createExtNodeMetadata(parent, callctx); visit(callctx); } else if (fieldctx != null) { - adapter.createExtNodeMetadata(parent, fieldctx); + metadata.createExtNodeMetadata(parent, fieldctx); visit(fieldctx); } @@ -1762,9 +1868,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExttype(final ExttypeContext ctx) { - final ExtNodeMetadata typeenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata typeenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = typeenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (parentemd.current != null) { throw new IllegalArgumentException(error(ctx) + "Unexpected static type."); @@ -1776,7 +1882,7 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.statik = true; final ExtdotContext dotctx = ctx.extdot(); - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); return null; @@ -1784,9 +1890,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtcall(final ExtcallContext ctx) { - final ExtNodeMetadata callenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = callenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtdotContext dotctx = ctx.extdot(); final ExtbraceContext bracectx = ctx.extbrace(); @@ -1811,7 +1917,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (method == null && !def) { throw new IllegalArgumentException( - error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); + error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); } else if (method != null) { types = new Type[method.arguments.size()]; method.arguments.toArray(types); @@ -1823,8 +1929,8 @@ class Analyzer extends PlanAParserBaseVisitor { if (size != types.length) { throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + - "[" + struct.name + "] expected [" + types.length + "] arguments," + - " but found [" + arguments.size() + "]."); + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); } } else { types = new Type[arguments.size()]; @@ -1837,8 +1943,8 @@ class Analyzer extends PlanAParserBaseVisitor { } for (int argument = 0; argument < size; ++argument) { - final ExpressionContext exprctx = adapter.updateExpressionTree(arguments.get(argument)); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = types[argument]; visit(exprctx); markCast(expremd); @@ -1847,10 +1953,10 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.statik = false; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -1859,9 +1965,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void 
visitExtvar(final ExtvarContext ctx) { - final ExtNodeMetadata varenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = varenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final String name = ctx.ID().getText(); @@ -1886,10 +1992,10 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current = varenmd.type; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -1898,9 +2004,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtfield(final ExtfieldContext ctx) { - final ExtNodeMetadata memberenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = memberenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) { throw new IllegalArgumentException(error(ctx) + "Unexpected parser state."); @@ -1924,7 +2030,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException(error(ctx) + "Must read array field [length]."); } else if (store) { throw new IllegalArgumentException( - error(ctx) + "Cannot write to read-only array field [length]."); + error(ctx) + "Cannot write to read-only array field [length]."); } memberenmd.target = "#length"; @@ -1945,7 +2051,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (field != null) { if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { throw new IllegalArgumentException(error(ctx) + "Cannot write to read-only" + - " field [" + value + "] for type [" + struct.name + "]."); + " field [" + value + "] for type [" + struct.name + "]."); } memberenmd.target = field; @@ -1962,12 +2068,12 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { throw new IllegalArgumentException(error(ctx) + - "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); + "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { throw new IllegalArgumentException(error(ctx) + - "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); + "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); } Type settype = setter == null ? 
null : setter.arguments.get(0); @@ -1983,13 +2089,13 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || getter.arguments.get(0).sort != Sort.STRING)) { throw new IllegalArgumentException(error(ctx) + - "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.STRING)) { + setter.arguments.get(0).sort != Sort.STRING)) { throw new IllegalArgumentException(error(ctx) + - "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); } if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { @@ -2006,18 +2112,18 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current.clazz.asSubclass(List.class); getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("add"); + setter = parentemd.current.struct.methods.get("set"); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.INT)) { + getter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.INT)) { + setter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list add shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal list set shortcut [" + value + "] for type [" + struct.name + "]."); } if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { @@ -2030,7 +2136,7 @@ class Analyzer extends PlanAParserBaseVisitor { constant = Integer.parseInt(value); } catch (NumberFormatException exception) { throw new IllegalArgumentException(error(ctx) + - "Illegal list shortcut value [" + value + "]."); + "Illegal list shortcut value [" + value + "]."); } } catch (ClassCastException exception) { //Do nothing. 
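Note on the list shortcut changes above: the analyzer (and, further below, the Def runtime) now resolves the list store shortcut to List.set(int, Object) rather than List.add(int, Object). The distinction is behavioral: add(int, Object) inserts a new element and shifts the rest of the list, while set(int, Object) overwrites the element in place, which is what an indexed store is expected to do. A minimal standalone sketch of the difference (plain Java, illustrative only, not part of this patch; the class name is invented):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ListShortcutDemo {
        public static void main(String[] args) {
            final List<Object> viaAdd = new ArrayList<>(Arrays.asList("a", "b"));
            final List<Object> viaSet = new ArrayList<>(Arrays.asList("a", "b"));

            // Old shortcut resolution: inserts at index 0 and shifts the rest.
            viaAdd.add(0, "x");
            // New shortcut resolution: overwrites the element at index 0.
            viaSet.set(0, "x");

            System.out.println(viaAdd); // [x, a, b] -- the list grew
            System.out.println(viaSet); // [x, b]    -- the element was replaced
        }
    }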
@@ -2050,17 +2156,17 @@ class Analyzer extends PlanAParserBaseVisitor { if (memberenmd.target == null) { throw new IllegalArgumentException( - error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); + error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); } } parentemd.statik = false; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -2069,9 +2175,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtnew(ExtnewContext ctx) { - final ExtNodeMetadata newenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = newenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtdotContext dotctx = ctx.extdot(); final ExtbraceContext bracectx = ctx.extbrace(); @@ -2129,7 +2235,7 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current = newenmd.type; } else { throw new IllegalArgumentException( - error(ctx) + "Unknown new call on type [" + struct.name + "]."); + error(ctx) + "Unknown new call on type [" + struct.name + "]."); } } else { throw new IllegalArgumentException(error(ctx) + "Unknown parser state."); @@ -2137,23 +2243,23 @@ class Analyzer extends PlanAParserBaseVisitor { if (size != types.length) { throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + - "[" + struct.name + "] expected [" + types.length + "] arguments," + - " but found [" + arguments.size() + "]."); + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); } for (int argument = 0; argument < size; ++argument) { - final ExpressionContext exprctx = adapter.updateExpressionTree(arguments.get(argument)); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = types[argument]; visit(exprctx); markCast(expremd); } if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -2162,9 +2268,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtstring(final ExtstringContext ctx) { - final ExtNodeMetadata memberenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = memberenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final String string = ctx.STRING().getText(); @@ -2182,7 +2288,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException(error(ctx) + "Must read String constant [" + string + "]."); } else if (store) { throw new IllegalArgumentException( - error(ctx) + "Cannot write to read-only String constant [" + string + "]."); + 
error(ctx) + "Cannot write to read-only String constant [" + string + "]."); } memberenmd.target = string; @@ -2194,10 +2300,10 @@ class Analyzer extends PlanAParserBaseVisitor { } if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -2211,7 +2317,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitIncrement(IncrementContext ctx) { - final ExpressionMetadata incremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); final Sort sort = incremd.to == null ? null : incremd.to.sort; final boolean positive = ctx.INCR() != null; @@ -2242,13 +2348,13 @@ class Analyzer extends PlanAParserBaseVisitor { } private void analyzeLoadStoreExternal(final ParserRuleContext source) { - final ExtNodeMetadata extenmd = adapter.getExtNodeMetadata(source); + final ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source); final ParserRuleContext parent = extenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (extenmd.last && parentemd.storeExpr != null) { final ParserRuleContext store = parentemd.storeExpr; - final ExpressionMetadata storeemd = adapter.createExpressionMetadata(parentemd.storeExpr); + final ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr); final int token = parentemd.token; if (token > 0) { @@ -2259,12 +2365,12 @@ class Analyzer extends PlanAParserBaseVisitor { final boolean decimal = token == MUL || token == DIV || token == REM || token == SUB; extenmd.promote = add ? promoteAdd(extenmd.type, storeemd.from) : - xor ? promoteXor(extenmd.type, storeemd.from) : - promoteNumeric(extenmd.type, storeemd.from, decimal, true); + xor ? 
promoteXor(extenmd.type, storeemd.from) : + promoteNumeric(extenmd.type, storeemd.from, decimal, true); if (extenmd.promote == null) { throw new IllegalArgumentException("Cannot apply compound assignment to " + - " types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); + "types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); } extenmd.castFrom = getLegalCast(source, extenmd.type, extenmd.promote, false); @@ -2722,7 +2828,7 @@ class Analyzer extends PlanAParserBaseVisitor { from.clazz.asSubclass(to.clazz); return cast; - } catch (ClassCastException cce0) { + } catch (final ClassCastException cce0) { try { if (explicit) { to.clazz.asSubclass(from.clazz); @@ -2730,11 +2836,11 @@ class Analyzer extends PlanAParserBaseVisitor { return cast; } else { throw new ClassCastException( - error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); - } - } catch (ClassCastException cce1) { - throw new ClassCastException( error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + } + } catch (final ClassCastException cce1) { + throw new ClassCastException( + error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); } } } @@ -2744,7 +2850,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (transform == null) { throw new ClassCastException( - error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); + error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); } return transform; @@ -2782,8 +2888,8 @@ class Analyzer extends PlanAParserBaseVisitor { } } else { throw new IllegalStateException(error(source) + "No valid constant cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to " + - "[" + cast.to.clazz.getCanonicalName() + "]."); + "[" + cast.from.clazz.getCanonicalName() + "] to " + + "[" + cast.to.clazz.getCanonicalName() + "]."); } } } @@ -2800,10 +2906,10 @@ class Analyzer extends PlanAParserBaseVisitor { return jmethod.invoke(object); } } catch (IllegalAccessException | IllegalArgumentException | - java.lang.reflect.InvocationTargetException | NullPointerException | - ExceptionInInitializerError exception) { + java.lang.reflect.InvocationTargetException | NullPointerException | + ExceptionInInitializerError exception) { throw new IllegalStateException(error(source) + "Unable to invoke transform to cast constant from " + - "[" + transform.from.name + "] to [" + transform.to.name + "]."); + "[" + transform.from.name + "] to [" + transform.to.name + "]."); } } @@ -2813,7 +2919,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (sort == Sort.DEF) { return definition.defType; } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ || sort == Sort.NUMBER) && decimal) { - return primitive ? definition.doubleType : definition.doubleobjType; + return primitive ? definition.doubleType : definition.doubleobjType; } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { return primitive ? definition.floatType : definition.floatobjType; } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ || sort == Sort.NUMBER) { @@ -2835,7 +2941,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (decimal) { if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort0 == Sort.NUMBER || - sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { + sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { return primitive ? 
definition.doubleType : definition.doubleobjType; } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { return primitive ? definition.floatType : definition.floatobjType; @@ -2843,8 +2949,8 @@ } if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort0 == Sort.NUMBER || - sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { - return primitive ? definition.longType : definition.longobjType; + sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { + return primitive ? definition.longType : definition.longobjType; } else if (sort0.numeric && sort1.numeric) { return primitive ? definition.intType : definition.intobjType; } @@ -2885,7 +2991,7 @@ final boolean primitive = sort0.primitive && sort1.primitive; if (sort0.bool && sort1.bool) { - return primitive ? definition.booleanType : definition.byteobjType; + return primitive ? definition.booleanType : definition.booleanobjType; } if (sort0.numeric && sort1.numeric) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java index 4d6936a0d72..b6aa5f075ab 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java @@ -30,88 +30,87 @@ import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; +/** + * The Compiler is the entry point for generating a Plan A script. The compiler will generate an ANTLR + * parse tree based on the source code that is passed in. Two passes will then be run over the parse tree, + * one for analysis using the {@link Analyzer} and another to generate the actual byte code using ASM in + * the {@link Writer}. + */ final class Compiler { + /** + * The default language API to be used with Plan A. The second constructor call is used + * to finalize all the variables, so they cannot be modified afterwards. + */ private static Definition DEFAULT_DEFINITION = new Definition(new Definition()); - /** we define the class with lowest privileges */ + /** + * Define the class with the lowest privileges. + */ private static final CodeSource CODESOURCE; + /** + * Set up the code privileges. + */ static { try { + // Set up the code privileges. CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); } catch (MalformedURLException impossible) { throw new RuntimeException(impossible); } } + /** + * A secure class loader used to define Plan A scripts. + */ static class Loader extends SecureClassLoader { - Loader(ClassLoader parent) { + /** + * @param parent The parent ClassLoader. + */ + Loader(final ClassLoader parent) { super(parent); } - Class define(String name, byte[] bytes) { + /** + * Generates a Class object from the generated byte code. + * @param name The name of the class. + * @param bytes The generated byte code. + * @return A Class object extending {@link Executable}.
+ */ + Class define(final String name, final byte[] bytes) { return defineClass(name, bytes, 0, bytes.length, CODESOURCE).asSubclass(Executable.class); } } - static Executable compile(Loader loader, final String name, final String source, final Definition custom, CompilerSettings settings) { - long start = System.currentTimeMillis(); - - final Definition definition = custom == null ? DEFAULT_DEFINITION : new Definition(custom); - - //long end = System.currentTimeMillis() - start; - //System.out.println("types: " + end); - //start = System.currentTimeMillis(); - - //final ParserRuleContext root = createParseTree(source, types); - final ANTLRInputStream stream = new ANTLRInputStream(source); - final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); - final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); - final ParserErrorStrategy strategy = new ParserErrorStrategy(); - - lexer.removeErrorListeners(); - lexer.setTypes(definition.structs.keySet()); - - //List tokens = lexer.getAllTokens(); - - //for (final Token token : tokens) { - // System.out.println(token.getType() + " " + token.getText()); - //} - - parser.removeErrorListeners(); - parser.setErrorHandler(strategy); - - ParserRuleContext root = parser.source(); - - //end = System.currentTimeMillis() - start; - //System.out.println("tree: " + end); - - final Adapter adapter = new Adapter(definition, source, root, settings); - - start = System.currentTimeMillis(); - - Analyzer.analyze(adapter); - //System.out.println(root.toStringTree(parser)); - - //end = System.currentTimeMillis() - start; - //System.out.println("analyze: " + end); - //start = System.currentTimeMillis(); - - final byte[] bytes = Writer.write(adapter); - - //end = System.currentTimeMillis() - start; - //System.out.println("write: " + end); - //start = System.currentTimeMillis(); - + /** + * Runs the two-pass compiler to generate a Plan A script. + * @param loader The ClassLoader used to define the script. + * @param name The name of the script. + * @param source The source code for the script. + * @param custom An optional custom Definition (API) for the script; when null, the default Definition is used. + * @param settings The CompilerSettings to be used during the compilation. + * @return An {@link Executable} Plan A script. + */ + static Executable compile(final Loader loader, final String name, final String source, + final Definition custom, final CompilerSettings settings) { + final Definition definition = custom != null ? new Definition(custom) : DEFAULT_DEFINITION; + final ParserRuleContext root = createParseTree(source, definition); + final Metadata metadata = new Metadata(definition, source, root, settings); + Analyzer.analyze(metadata); + final byte[] bytes = Writer.write(metadata); final Executable executable = createExecutable(loader, definition, name, source, bytes); - //end = System.currentTimeMillis() - start; - //System.out.println("create: " + end); - return executable; } - private static ParserRuleContext createParseTree(String source, Definition definition) { + /** + * Generates the ANTLR tree from the given source code. Several methods below are used + * to ensure that the first error generated by ANTLR will cause the compilation to fail rather than + * use ANTLR's recovery strategies, which may be dangerous. + * @param source The source code for the script. + * @param definition The Plan A API. + * @return The root node for the ANTLR parse tree.
+ */ + private static ParserRuleContext createParseTree(final String source, final Definition definition) { final ANTLRInputStream stream = new ANTLRInputStream(source); final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); @@ -119,36 +118,50 @@ final class Compiler { lexer.removeErrorListeners(); lexer.setTypes(definition.structs.keySet()); - parser.removeErrorListeners(); parser.setErrorHandler(strategy); ParserRuleContext root = parser.source(); - // System.out.println(root.toStringTree(parser)); + return root; } - private static Executable createExecutable(Loader loader, Definition definition, String name, String source, byte[] bytes) { + /** + * Generates an {@link Executable} that can run a Plan A script. + * @param loader The {@link Loader} to define the script's class file. + * @param definition The Plan A API. + * @param name The name of the script. + * @param source The source text of the script. + * @param bytes The ASM generated byte code to define the class with. + * @return A Plan A {@link Executable} script. + */ + private static Executable createExecutable(final Loader loader, final Definition definition, + final String name, final String source, final byte[] bytes) { try { - // for debugging: - //try { - // FileOutputStream f = new FileOutputStream(new File("/Users/jdconrad/lang/generated/out.class"), false); - // f.write(bytes); - // f.close(); - //} catch (Exception e) { - // throw new RuntimeException(e); - //} + // Used for debugging. Uncomment this code and add -Dtests.security.manager=false when running to save + // the generated Java class files. The javap tool can then be used to inspect the generated byte code. + + // try { + // FileOutputStream f = new FileOutputStream(new File(""), false); + // f.write(bytes); + // f.close(); + // } catch (Exception e) { + // throw new RuntimeException(e); + // } final Class clazz = loader.define(Writer.CLASS_NAME, bytes); final java.lang.reflect.Constructor constructor = clazz.getConstructor(Definition.class, String.class, String.class); return constructor.newInstance(definition, name, source); - } catch (Exception exception) { + } catch (final Exception exception) { // Catch everything to let the user know this is something caused internally. throw new IllegalStateException( "An internal error occurred attempting to define the script [" + name + "].", exception); } } + /** + * All methods in the compiler should be static. + */ private Compiler() {} } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java index f66b65d0612..4b5e753e342 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java @@ -19,13 +19,30 @@ package org.elasticsearch.plan.a; -/** - * Settings to use when compiling a script +/** + * Settings to use when compiling a script. */ final class CompilerSettings { + /** + * Constant to be used when specifying numeric overflow when compiling a script. + */ + public static final String NUMERIC_OVERFLOW = "numeric_overflow"; + /** + * Constant to be used when specifying the maximum loop counter when compiling a script. + */ + public static final String MAX_LOOP_COUNTER = "max_loop_counter"; + + /** + * Whether or not to allow numeric values to overflow without exception. 
+ */ private boolean numericOverflow = true; + /** + * The maximum number of statements allowed to be executed across all loops in a script. + */ + private int maxLoopCounter = 10000; + /** * Returns {@code true} if numeric operations should overflow, {@code false} * if they should signal an exception. @@ -46,4 +63,20 @@ public void setNumericOverflow(boolean allow) { this.numericOverflow = allow; } + + /** + * Returns the value for the cumulative total number of statements that can be executed in all loops + * in a script before an exception is thrown. This attempts to prevent infinite loops. + */ + public int getMaxLoopCounter() { + return maxLoopCounter; + } + + /** + * Sets the cumulative total number of statements that can be executed in all loops. + * @see #getMaxLoopCounter + */ + public void setMaxLoopCounter(int max) { + this.maxLoopCounter = max; + } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java index c7a8ce410fd..eb5e0bdd66f 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java @@ -107,7 +107,7 @@ } else if (owner instanceof List) { try { final int index = Integer.parseInt(name); - ((List)owner).add(index, value); + ((List)owner).set(index, value); } catch (NumberFormatException exception) { throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "]."); } @@ -198,7 +198,7 @@ "in array class [" + array.getClass().getCanonicalName() + "].", throwable); } } else if (array instanceof List) { - ((List)array).add((int)index, value); + ((List)array).set((int)index, value); } else { throw new IllegalArgumentException("Attempting to address a non-array type " + "[" + array.getClass().getCanonicalName() + "] as an array."); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java index 5c52a202919..613bdafd8ee 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java @@ -24,10 +24,14 @@ import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; class Definition { enum Sort { @@ -351,16 +355,32 @@ final Type utilityType; final Type defobjType; + final Type itrType; + final Type oitrType; + final Type sitrType; + + final Type collectionType; + final Type ocollectionType; + final Type scollectionType; + final Type listType; final Type arraylistType; - final Type mapType; - final Type hashmapType; - final Type olistType; final Type oarraylistType; - final Type omapType; - final Type ohashmapType; + final Type slistType; + final Type sarraylistType; + final Type setType; + final Type hashsetType; + final Type osetType; + final Type ohashsetType; + final Type ssetType; + final Type shashsetType; + + final Type mapType; + final Type hashmapType; + final Type oomapType; + final Type oohashmapType; final Type smapType; final Type shashmapType; final Type somapType; @@ -368,6 +388,12 @@ final Type execType; +
final Type exceptionType; + final Type arithexcepType; + final Type iargexcepType; + final Type istateexceptType; + final Type nfexcepType; + public Definition() { structs = new HashMap<>(); classes = new HashMap<>(); @@ -406,16 +432,32 @@ class Definition { utilityType = getType("Utility"); defobjType = getType("Def"); + itrType = getType("Iterator"); + oitrType = getType("Iterator"); + sitrType = getType("Iterator"); + + collectionType = getType("Collection"); + ocollectionType = getType("Collection"); + scollectionType = getType("Collection"); + listType = getType("List"); arraylistType = getType("ArrayList"); - mapType = getType("Map"); - hashmapType = getType("HashMap"); - olistType = getType("List"); oarraylistType = getType("ArrayList"); - omapType = getType("Map"); - ohashmapType = getType("HashMap"); + slistType = getType("List"); + sarraylistType = getType("ArrayList"); + setType = getType("Set"); + hashsetType = getType("HashSet"); + osetType = getType("Set"); + ohashsetType = getType("HashSet"); + ssetType = getType("Set"); + shashsetType = getType("HashSet"); + + mapType = getType("Map"); + hashmapType = getType("HashMap"); + oomapType = getType("Map"); + oohashmapType = getType("HashMap"); smapType = getType("Map"); shashmapType = getType("HashMap"); somapType = getType("Map"); @@ -423,6 +465,12 @@ class Definition { execType = getType("Executable"); + exceptionType = getType("Exception"); + arithexcepType = getType("ArithmeticException"); + iargexcepType = getType("IllegalArgumentException"); + istateexceptType = getType("IllegalStateException"); + nfexcepType = getType("NumberFormatException"); + addDefaultElements(); copyDefaultStructs(); addDefaultTransforms(); @@ -478,22 +526,44 @@ class Definition { utilityType = definition.utilityType; defobjType = definition.defobjType; + itrType = definition.itrType; + oitrType = definition.oitrType; + sitrType = definition.sitrType; + + collectionType = definition.collectionType; + ocollectionType = definition.ocollectionType; + scollectionType = definition.scollectionType; + listType = definition.listType; arraylistType = definition.arraylistType; - mapType = definition.mapType; - hashmapType = definition.hashmapType; - olistType = definition.olistType; oarraylistType = definition.oarraylistType; - omapType = definition.omapType; - ohashmapType = definition.ohashmapType; + slistType = definition.slistType; + sarraylistType = definition.sarraylistType; + setType = definition.setType; + hashsetType = definition.hashsetType; + osetType = definition.osetType; + ohashsetType = definition.ohashsetType; + ssetType = definition.ssetType; + shashsetType = definition.shashsetType; + + mapType = definition.mapType; + hashmapType = definition.hashmapType; + oomapType = definition.oomapType; + oohashmapType = definition.oohashmapType; smapType = definition.smapType; shashmapType = definition.shashmapType; somapType = definition.somapType; sohashmapType = definition.sohashmapType; execType = definition.execType; + + exceptionType = definition.exceptionType; + arithexcepType = definition.arithexcepType; + iargexcepType = definition.iargexcepType; + istateexceptType = definition.istateexceptType; + nfexcepType = definition.nfexcepType; } private void addDefaultStructs() { @@ -526,22 +596,44 @@ class Definition { addStruct( "Utility" , Utility.class ); addStruct( "Def" , Def.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); + 
addStruct( "Iterator" , Iterator.class ); + addStruct( "Iterator" , Iterator.class ); + addStruct( "Iterator" , Iterator.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); + addStruct( "Collection" , Collection.class ); + addStruct( "Collection" , Collection.class ); + addStruct( "Collection" , Collection.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + + addStruct( "Set" , Set.class ); + addStruct( "HashSet" , HashSet.class ); + addStruct( "Set" , Set.class ); + addStruct( "HashSet" , HashSet.class ); + addStruct( "Set" , Set.class ); + addStruct( "HashSet" , HashSet.class ); + + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); addStruct( "Executable" , Executable.class ); + + addStruct( "Exception" , Exception.class); + addStruct( "ArithmeticException" , ArithmeticException.class); + addStruct( "IllegalArgumentException" , IllegalArgumentException.class); + addStruct( "IllegalStateException" , IllegalStateException.class); + addStruct( "NumberFormatException" , NumberFormatException.class); } private void addDefaultClasses() { @@ -568,10 +660,16 @@ class Definition { addClass("CharSequence"); addClass("String"); + addClass("Iterator"); + addClass("Collection"); addClass("List"); addClass("ArrayList"); + addClass("Set"); + addClass("HashSet"); addClass("Map"); addClass("HashMap"); + + addClass("Exception"); } private void addDefaultElements() { @@ -587,45 +685,39 @@ class Definition { addMethod("Boolean", "valueOf", null, true, booleanobjType, new Type[] {booleanType}, null, null); addMethod("Boolean", "booleanValue", null, false, booleanType, new Type[] {}, null, null); - addConstructor("Byte", "new", new Type[]{byteType}, null); + addConstructor("Byte", "new", new Type[] {byteType}, null); addMethod("Byte", "valueOf", null, true, byteobjType, new Type[] {byteType}, null, null); - addMethod("Byte", "byteValue", null, false, byteType, new Type[] {}, null, null); addField("Byte", "MIN_VALUE", null, true, byteType, null); addField("Byte", "MAX_VALUE", null, true, byteType, null); - addConstructor("Short", "new", new Type[]{shortType}, null); + addConstructor("Short", "new", new Type[] {shortType}, null); addMethod("Short", "valueOf", null, true, shortobjType, new Type[] {shortType}, null, null); - addMethod("Short", "shortValue", null, false, shortType, new Type[] {}, null, null); addField("Short", "MIN_VALUE", null, true, shortType, null); addField("Short", "MAX_VALUE", null, true, shortType, null); - addConstructor("Character", "new", new Type[]{charType}, null); + addConstructor("Character", "new", new Type[] {charType}, null); addMethod("Character", "valueOf", null, true, charobjType, new Type[] {charType}, null, null); addMethod("Character", "charValue", null, false, charType, new Type[] {}, null, null); addField("Character", "MIN_VALUE", null, true, charType, null); 
addField("Character", "MAX_VALUE", null, true, charType, null); - addConstructor("Integer", "new", new Type[]{intType}, null); + addConstructor("Integer", "new", new Type[] {intType}, null); addMethod("Integer", "valueOf", null, true, intobjType, new Type[] {intType}, null, null); - addMethod("Integer", "intValue", null, false, intType, new Type[] {}, null, null); addField("Integer", "MIN_VALUE", null, true, intType, null); addField("Integer", "MAX_VALUE", null, true, intType, null); - addConstructor("Long", "new", new Type[]{longType}, null); + addConstructor("Long", "new", new Type[] {longType}, null); addMethod("Long", "valueOf", null, true, longobjType, new Type[] {longType}, null, null); - addMethod("Long", "longValue", null, false, longType, new Type[] {}, null, null); addField("Long", "MIN_VALUE", null, true, longType, null); addField("Long", "MAX_VALUE", null, true, longType, null); - addConstructor("Float", "new", new Type[]{floatType}, null); + addConstructor("Float", "new", new Type[] {floatType}, null); addMethod("Float", "valueOf", null, true, floatobjType, new Type[] {floatType}, null, null); - addMethod("Float", "floatValue", null, false, floatType, new Type[] {}, null, null); addField("Float", "MIN_VALUE", null, true, floatType, null); addField("Float", "MAX_VALUE", null, true, floatType, null); - addConstructor("Double", "new", new Type[]{doubleType}, null); + addConstructor("Double", "new", new Type[] {doubleType}, null); addMethod("Double", "valueOf", null, true, doubleobjType, new Type[] {doubleType}, null, null); - addMethod("Double", "doubleValue", null, false, doubleType, new Type[] {}, null, null); addField("Double", "MIN_VALUE", null, true, doubleType, null); addField("Double", "MAX_VALUE", null, true, doubleType, null); @@ -760,7 +852,44 @@ class Definition { addMethod("Utility", "DoubleToboolean", null, true, booleanType, new Type[] {doubleobjType}, null, null); addMethod("Utility", "DoubleTochar", null, true, charType, new Type[] {doubleobjType}, null, null); - addMethod("Math", "dmax", "max", true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "abs", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "fabs", "abs", true, floatType, new Type[] {floatType}, null, null); + addMethod("Math", "labs", "abs", true, longType, new Type[] {longType}, null, null); + addMethod("Math", "iabs", "abs", true, intType, new Type[] {intType}, null, null); + addMethod("Math", "acos", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "asin", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "atan", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "atan2", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "cbrt", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "ceil", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "cos", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "cosh", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "exp", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "expm1", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "floor", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "hypot", null, true, doubleType, new Type[] {doubleType, 
doubleType}, null, null); + addMethod("Math", "log", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "log10", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "log1p", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "max", "max", true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "fmax", "max", true, floatType, new Type[] {floatType, floatType}, null, null); + addMethod("Math", "lmax", "max", true, longType, new Type[] {longType, longType}, null, null); + addMethod("Math", "imax", "max", true, intType, new Type[] {intType, intType}, null, null); + addMethod("Math", "min", "min", true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "fmin", "min", true, floatType, new Type[] {floatType, floatType}, null, null); + addMethod("Math", "lmin", "min", true, longType, new Type[] {longType, longType}, null, null); + addMethod("Math", "imin", "min", true, intType, new Type[] {intType, intType}, null, null); + addMethod("Math", "pow", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "random", null, true, doubleType, new Type[] {}, null, null); + addMethod("Math", "rint", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "round", null, true, longType, new Type[] {doubleType}, null, null); + addMethod("Math", "sin", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "sinh", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "sqrt", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "tan", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "tanh", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "toDegrees", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "toRadians", null, true, doubleType, new Type[] {doubleType}, null, null); addMethod("Def", "DefToboolean", null, true, booleanType, new Type[] {defType}, null, null); addMethod("Def", "DefTobyte", null, true, byteType, new Type[] {defType}, null, null); @@ -778,56 +907,124 @@ class Definition { addMethod("Def", "DefToLong", null, true, longobjType, new Type[] {defType}, null, null); addMethod("Def", "DefToFloat", null, true, floatobjType, new Type[] {defType}, null, null); addMethod("Def", "DefToDouble", null, true, doubleobjType, new Type[] {defType}, null, null); - - addMethod("List", "addLast", "add", false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("List", "add", null, false, voidType, new Type[] {intType, objectType}, null, new Type[] {intType, defType}); + + addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); + addMethod("Iterator", "next", null, false, objectType, new Type[] {}, defType, null); + addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); + + addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); + addMethod("Iterator", "next", null, false, objectType, new Type[] {}, null, null); + addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); + + addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); + addMethod("Iterator", "next", null, false, objectType, new Type[] {}, stringType, null); + addMethod("Iterator", 
"remove", null, false, voidType, new Type[] {}, null, null); + + addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); + addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Collection", "iterator", null, false, itrType, new Type[] {}, null, null); + addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); + + addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); + addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Collection", "iterator", null, false, oitrType, new Type[] {}, null, null); + addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); + + addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); + addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Collection", "iterator", null, false, sitrType, new Type[] {}, null, null); + addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); + + addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, defType, new Type[] {intType, defType}); addMethod("List", "get", null, false, objectType, new Type[] {intType}, defType, null); addMethod("List", "remove", null, false, objectType, new Type[] {intType}, defType, null); - addMethod("List", "size", null, false, intType, new Type[] {}, null, null); - addMethod("List", "isEmpty", null, false, booleanType, new Type[] {}, null, null); addConstructor("ArrayList", "new", new Type[] {}, null); + addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, null, null); + addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null); + addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null); + + addConstructor("ArrayList", "new", new Type[] {}, null); + + addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, stringType, new Type[] {intType, stringType}); + addMethod("List", "get", null, false, objectType, new Type[] {intType}, stringType, null); + addMethod("List", "remove", null, false, objectType, new Type[] {intType}, stringType, null); + + addConstructor("ArrayList", "new", new Type[] {}, null); + + addConstructor("HashSet", "new", new Type[] {}, null); + + addConstructor("HashSet", "new", new Type[] {}, null); + + addConstructor("HashSet", "new", new Type[] {}, null); + 
addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {defType, defType}); addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {defType}); addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, setType, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null); addConstructor("HashMap", "new", new Type[] {}, null); - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {stringType, defType}); - addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); - addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); - addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - - addConstructor("HashMap", "new", new Type[] {}, null); - - addMethod("List", "addLast", "add", false, booleanType, new Type[] {objectType}, null, null); - addMethod("List", "add", null, false, voidType, new Type[] {intType, objectType}, null, null); - addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null); - addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null); - addMethod("List", "size", null, false, intType, new Type[] {}, null, null); - addMethod("List", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - - addConstructor("ArrayList", "new", new Type[] {}, null); - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, null); addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, null); addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, null, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null); addConstructor("HashMap", "new", new Type[] {}, null); + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {stringType, defType}); + addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); + addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); + addMethod("Map", "isEmpty", null, false, booleanType, new Type[] 
{}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null); + + addConstructor("HashMap", "new", new Type[] {}, null); + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, new Type[] {stringType, objectType}); addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null); addConstructor("HashMap", "new", new Type[] {}, null); + + addMethod("Exception", "getMessage", null, false, stringType, new Type[] {}, null, null); + + addConstructor("ArithmeticException", "new", new Type[] {stringType}, null); + + addConstructor("IllegalArgumentException", "new", new Type[] {stringType}, null); + + addConstructor("IllegalStateException", "new", new Type[] {stringType}, null); + + addConstructor("NumberFormatException", "new", new Type[] {stringType}, null); } private void copyDefaultStructs() { @@ -845,21 +1042,36 @@ class Definition { copyStruct("CharSequence", "Object"); copyStruct("String", "CharSequence", "Object"); - copyStruct("List", "Object"); - copyStruct("ArrayList", "List", "Object"); + copyStruct("List", "Collection", "Object"); + copyStruct("ArrayList", "List", "Collection", "Object"); + copyStruct("List", "Collection", "Object"); + copyStruct("ArrayList", "List", "Collection", "Object"); + copyStruct("List", "Collection", "Object"); + copyStruct("ArrayList", "List", "Collection", "Object"); + + copyStruct("Set", "Collection", "Object"); + copyStruct("HashSet", "Set", "Collection", "Object"); + copyStruct("Set", "Collection", "Object"); + copyStruct("HashSet", "Set", "Collection", "Object"); + copyStruct("Set", "Collection", "Object"); + copyStruct("HashSet", "Set", "Collection", "Object"); + copyStruct("Map", "Object"); copyStruct("HashMap", "Map", "Object"); - copyStruct("Map", "Object"); - copyStruct("HashMap", "Map", "Object"); - - copyStruct("List", "Object"); - copyStruct("ArrayList", "List", "Object"); copyStruct("Map", "Object"); copyStruct("HashMap", "Map", "Object"); + copyStruct("Map", "Object"); + copyStruct("HashMap", "Map", "Object"); copyStruct("Map", "Object"); copyStruct("HashMap", "Map", "Object"); copyStruct("Executable", "Object"); + + copyStruct("Exception", "Object"); + copyStruct("ArithmeticException", "Exception", "Object"); + copyStruct("IllegalArgumentException", "Exception", "Object"); + copyStruct("IllegalStateException", 
"Exception", "Object"); + copyStruct("NumberFormatException", "Exception", "Object"); } private void addDefaultTransforms() { @@ -984,7 +1196,7 @@ class Definition { addTransform(defType, longobjType, "Def", "DefToLong", true); addTransform(defType, floatobjType, "Def", "DefToFloat", true); addTransform(defType, doubleobjType, "Def", "DefToDouble", true); - + addTransform(numberType, booleanType, "Utility", "NumberToboolean", true); addTransform(numberType, byteType, "Number", "byteValue", false); addTransform(numberType, shortType, "Number", "shortValue", false); @@ -1162,41 +1374,155 @@ class Definition { addBound(stringType, charseqType, charseqType); + addBound(oitrType, itrType, itrType); + addBound(oitrType, sitrType, itrType); + addBound(sitrType, itrType, itrType); + + addBound(ocollectionType, collectionType, collectionType); + addBound(scollectionType, collectionType, collectionType); + addBound(scollectionType, ocollectionType, ocollectionType); + addBound(listType, collectionType, collectionType); + addBound(listType, ocollectionType, collectionType); + addBound(listType, scollectionType, collectionType); + addBound(arraylistType, collectionType, collectionType); + addBound(arraylistType, ocollectionType, collectionType); + addBound(arraylistType, scollectionType, collectionType); addBound(arraylistType, listType, listType); + addBound(olistType, collectionType, collectionType); + addBound(olistType, ocollectionType, ocollectionType); + addBound(olistType, scollectionType, ocollectionType); addBound(olistType, listType, listType); addBound(olistType, arraylistType, listType); + addBound(oarraylistType, collectionType, collectionType); + addBound(oarraylistType, ocollectionType, ocollectionType); + addBound(oarraylistType, scollectionType, ocollectionType); addBound(oarraylistType, listType, listType); - addBound(oarraylistType, olistType, olistType); addBound(oarraylistType, arraylistType, arraylistType); + addBound(oarraylistType, olistType, olistType); + addBound(slistType, collectionType, collectionType); + addBound(slistType, ocollectionType, ocollectionType); + addBound(slistType, scollectionType, scollectionType); + addBound(slistType, listType, listType); + addBound(slistType, arraylistType, listType); + addBound(slistType, olistType, olistType); + addBound(slistType, oarraylistType, olistType); + addBound(sarraylistType, collectionType, collectionType); + addBound(sarraylistType, ocollectionType, ocollectionType); + addBound(sarraylistType, scollectionType, scollectionType); + addBound(sarraylistType, listType, listType); + addBound(sarraylistType, arraylistType, arraylistType); + addBound(sarraylistType, olistType, olistType); + addBound(sarraylistType, oarraylistType, oarraylistType); + addBound(sarraylistType, slistType, slistType); + addBound(setType, collectionType, collectionType); + addBound(setType, ocollectionType, collectionType); + addBound(setType, scollectionType, collectionType); + addBound(setType, listType, collectionType); + addBound(setType, arraylistType, collectionType); + addBound(setType, olistType, collectionType); + addBound(setType, oarraylistType, collectionType); + addBound(setType, slistType, collectionType); + addBound(setType, sarraylistType, collectionType); + addBound(hashsetType, collectionType, collectionType); + addBound(hashsetType, ocollectionType, collectionType); + addBound(hashsetType, scollectionType, collectionType); + addBound(hashsetType, listType, collectionType); + addBound(hashsetType, arraylistType, collectionType); + 
addBound(hashsetType, olistType, collectionType); + addBound(hashsetType, oarraylistType, collectionType); + addBound(hashsetType, slistType, collectionType); + addBound(hashsetType, sarraylistType, collectionType); + addBound(hashsetType, setType, setType); + addBound(osetType, collectionType, collectionType); + addBound(osetType, ocollectionType, ocollectionType); + addBound(osetType, scollectionType, ocollectionType); + addBound(osetType, listType, collectionType); + addBound(osetType, arraylistType, collectionType); + addBound(osetType, olistType, ocollectionType); + addBound(osetType, oarraylistType, ocollectionType); + addBound(osetType, slistType, ocollectionType); + addBound(osetType, sarraylistType, ocollectionType); + addBound(osetType, setType, setType); + addBound(osetType, hashsetType, setType); + addBound(ohashsetType, collectionType, collectionType); + addBound(ohashsetType, ocollectionType, ocollectionType); + addBound(ohashsetType, scollectionType, ocollectionType); + addBound(ohashsetType, listType, collectionType); + addBound(ohashsetType, arraylistType, collectionType); + addBound(ohashsetType, olistType, ocollectionType); + addBound(ohashsetType, oarraylistType, ocollectionType); + addBound(ohashsetType, slistType, ocollectionType); + addBound(ohashsetType, sarraylistType, ocollectionType); + addBound(ohashsetType, setType, setType); + addBound(ohashsetType, hashsetType, hashsetType); + addBound(ohashsetType, osetType, osetType); + addBound(ssetType, collectionType, collectionType); + addBound(ssetType, ocollectionType, ocollectionType); + addBound(ssetType, scollectionType, scollectionType); + addBound(ssetType, listType, collectionType); + addBound(ssetType, arraylistType, collectionType); + addBound(ssetType, olistType, ocollectionType); + addBound(ssetType, oarraylistType, ocollectionType); + addBound(ssetType, slistType, scollectionType); + addBound(ssetType, sarraylistType, scollectionType); + addBound(ssetType, setType, setType); + addBound(ssetType, hashsetType, setType); + addBound(ssetType, osetType, osetType); + addBound(ssetType, ohashsetType, osetType); + addBound(shashsetType, collectionType, collectionType); + addBound(shashsetType, ocollectionType, ocollectionType); + addBound(shashsetType, scollectionType, scollectionType); + addBound(shashsetType, listType, collectionType); + addBound(shashsetType, arraylistType, collectionType); + addBound(shashsetType, olistType, ocollectionType); + addBound(shashsetType, oarraylistType, ocollectionType); + addBound(shashsetType, slistType, scollectionType); + addBound(shashsetType, sarraylistType, scollectionType); + addBound(shashsetType, setType, setType); + addBound(shashsetType, hashsetType, hashsetType); + addBound(shashsetType, osetType, osetType); + addBound(shashsetType, ohashsetType, hashsetType); + addBound(shashsetType, ssetType, ssetType); addBound(hashmapType, mapType, mapType); - addBound(omapType, mapType, mapType); - addBound(omapType, hashmapType, mapType); - addBound(ohashmapType, mapType, mapType); - addBound(ohashmapType, hashmapType, hashmapType); - addBound(ohashmapType, omapType, omapType); + addBound(oomapType, mapType, mapType); + addBound(oomapType, hashmapType, mapType); + addBound(oohashmapType, mapType, mapType); + addBound(oohashmapType, hashmapType, hashmapType); + addBound(oohashmapType, oomapType, oomapType); addBound(smapType, mapType, mapType); addBound(smapType, hashmapType, mapType); - addBound(smapType, omapType, omapType); - addBound(smapType, ohashmapType, omapType); + 
addBound(smapType, oomapType, oomapType); + addBound(smapType, oohashmapType, oomapType); addBound(shashmapType, mapType, mapType); addBound(shashmapType, hashmapType, hashmapType); - addBound(shashmapType, omapType, omapType); - addBound(shashmapType, ohashmapType, ohashmapType); + addBound(shashmapType, oomapType, oomapType); + addBound(shashmapType, oohashmapType, oohashmapType); addBound(shashmapType, smapType, smapType); addBound(somapType, mapType, mapType); addBound(somapType, hashmapType, mapType); - addBound(somapType, omapType, omapType); - addBound(somapType, ohashmapType, omapType); + addBound(somapType, oomapType, oomapType); + addBound(somapType, oohashmapType, oomapType); addBound(somapType, smapType, smapType); addBound(somapType, shashmapType, smapType); addBound(sohashmapType, mapType, mapType); addBound(sohashmapType, hashmapType, hashmapType); - addBound(sohashmapType, omapType, omapType); - addBound(sohashmapType, ohashmapType, ohashmapType); + addBound(sohashmapType, oomapType, oomapType); + addBound(sohashmapType, oohashmapType, oohashmapType); addBound(sohashmapType, smapType, smapType); addBound(sohashmapType, shashmapType, shashmapType); addBound(sohashmapType, somapType, somapType); + + addBound(arithexcepType, exceptionType, exceptionType); + addBound(iargexcepType, exceptionType, exceptionType); + addBound(istateexceptType, exceptionType, exceptionType); + addBound(nfexcepType, exceptionType, exceptionType); + addBound(arithexcepType, iargexcepType, exceptionType); + addBound(arithexcepType, istateexceptType, exceptionType); + addBound(arithexcepType, nfexcepType, exceptionType); + addBound(iargexcepType, istateexceptType, exceptionType); + addBound(iargexcepType, nfexcepType, exceptionType); + addBound(istateexceptType, nfexcepType, exceptionType); } public final void addStruct(final String name, final Class clazz) { @@ -1232,30 +1558,30 @@ class Definition { if (owner == null) { throw new IllegalArgumentException( - "Owner struct [" + struct + "] not defined for constructor [" + name + "]."); + "Owner struct [" + struct + "] not defined for constructor [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { throw new IllegalArgumentException( - "Invalid constructor name [" + name + "] with the struct [" + owner.name + "]."); + "Invalid constructor name [" + name + "] with the struct [" + owner.name + "]."); } if (owner.constructors.containsKey(name)) { throw new IllegalArgumentException( - "Duplicate constructor name [" + name + "] found within the struct [" + owner.name + "]."); + "Duplicate constructor name [" + name + "] found within the struct [" + owner.name + "]."); } if (owner.statics.containsKey(name)) { throw new IllegalArgumentException("Constructors and functions may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } if (owner.methods.containsKey(name)) { throw new IllegalArgumentException("Constructors and methods may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } - final Class[] classes = new Class[args.length]; + final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { if (genargs != null) { @@ -1263,8 +1589,8 @@ class Definition { genargs[count].clazz.asSubclass(args[count].clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic 
argument [" + genargs[count].name + "]" + - " is not a sub class of [" + args[count].name + "] in the constructor" + - " [" + name + " ] from the struct [" + owner.name + "]."); + " is not a sub class of [" + args[count].name + "] in the constructor" + + " [" + name + " ] from the struct [" + owner.name + "]."); } } @@ -1277,12 +1603,12 @@ class Definition { reflect = owner.clazz.getConstructor(classes); } catch (NoSuchMethodException exception) { throw new IllegalArgumentException("Constructor [" + name + "] not found for class" + - " [" + owner.clazz.getName() + "] with arguments " + Arrays.toString(classes) + "."); + " [" + owner.clazz.getName() + "] with arguments " + Arrays.toString(classes) + "."); } final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect); final Constructor constructor = - new Constructor(name, owner, Arrays.asList(genargs != null ? genargs : args), asm, reflect); + new Constructor(name, owner, Arrays.asList(genargs != null ? genargs : args), asm, reflect); owner.constructors.put(name, constructor); } @@ -1293,37 +1619,37 @@ class Definition { if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined" + - " for " + (statik ? "function" : "method") + " [" + name + "]."); + " for " + (statik ? "function" : "method") + " [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { throw new IllegalArgumentException("Invalid " + (statik ? "function" : "method") + - " name [" + name + "] with the struct [" + owner.name + "]."); + " name [" + name + "] with the struct [" + owner.name + "]."); } if (owner.constructors.containsKey(name)) { throw new IllegalArgumentException("Constructors and " + (statik ? "functions" : "methods") + - " may not have the same name [" + name + "] within the same struct" + - " [" + owner.name + "]."); + " may not have the same name [" + name + "] within the same struct" + + " [" + owner.name + "]."); } if (owner.statics.containsKey(name)) { if (statik) { throw new IllegalArgumentException( - "Duplicate function name [" + name + "] found within the struct [" + owner.name + "]."); + "Duplicate function name [" + name + "] found within the struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Functions and methods may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } } if (owner.methods.containsKey(name)) { if (statik) { throw new IllegalArgumentException("Functions and methods may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Duplicate method name [" + name + "]" + - " found within the struct [" + owner.name + "]."); + " found within the struct [" + owner.name + "]."); } } @@ -1332,18 +1658,18 @@ class Definition { genrtn.clazz.asSubclass(rtn.clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic return [" + genrtn.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + - " [" + name + " ] from the struct [" + owner.name + "]."); + " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + + " [" + name + " ] from the struct [" + owner.name + "]."); } } if (genargs != null && genargs.length != args.length) { throw new IllegalArgumentException("Generic 
arguments arity [" + genargs.length + "] is not the same as " + - (statik ? "function" : "method") + " [" + name + "] arguments arity" + - " [" + args.length + "] within the struct [" + owner.name + "]."); + (statik ? "function" : "method") + " [" + name + "] arguments arity" + + " [" + args.length + "] within the struct [" + owner.name + "]."); } - final Class[] classes = new Class[args.length]; + final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { if (genargs != null) { @@ -1351,8 +1677,8 @@ class Definition { genargs[count].clazz.asSubclass(args[count].clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic argument [" + genargs[count].name + "] is not a sub class" + - " of [" + args[count].name + "] in the " + (statik ? "function" : "method") + - " [" + name + " ] from the struct [" + owner.name + "]."); + " of [" + args[count].name + "] in the " + (statik ? "function" : "method") + + " [" + name + " ] from the struct [" + owner.name + "]."); } } @@ -1365,15 +1691,15 @@ class Definition { reflect = owner.clazz.getMethod(alias == null ? name : alias, classes); } catch (NoSuchMethodException exception) { throw new IllegalArgumentException((statik ? "Function" : "Method") + - " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + - " with arguments " + Arrays.toString(classes) + "."); + " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + + " with arguments " + Arrays.toString(classes) + "."); } if (!reflect.getReturnType().equals(rtn.clazz)) { throw new IllegalArgumentException("Specified return type class [" + rtn.clazz + "]" + - " does not match the found return type class [" + reflect.getReturnType() + "] for the " + - (statik ? "function" : "method") + " [" + name + "]" + - " within the struct [" + owner.name + "]."); + " does not match the found return type class [" + reflect.getReturnType() + "] for the " + + (statik ? "function" : "method") + " [" + name + "]" + + " within the struct [" + owner.name + "]."); } final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect); @@ -1383,32 +1709,32 @@ class Definition { try { if (statik) { handle = MethodHandles.publicLookup().in(owner.clazz).findStatic( - owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); + owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); } else { handle = MethodHandles.publicLookup().in(owner.clazz).findVirtual( - owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); + owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); } } catch (NoSuchMethodException | IllegalAccessException exception) { throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + "]" + - " not found for class [" + owner.clazz.getName() + "]" + - " with arguments " + Arrays.toString(classes) + "."); + " not found for class [" + owner.clazz.getName() + "]" + + " with arguments " + Arrays.toString(classes) + "."); } final Method method = new Method(name, owner, genrtn != null ? genrtn : rtn, - Arrays.asList(genargs != null ? genargs : args), asm, reflect, handle); + Arrays.asList(genargs != null ? 
genargs : args), asm, reflect, handle); final int modifiers = reflect.getModifiers(); if (statik) { if (!java.lang.reflect.Modifier.isStatic(modifiers)) { throw new IllegalArgumentException("Function [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to a static Java method."); + " within the struct [" + owner.name + "] is not linked to a static Java method."); } owner.functions.put(name, method); } else { if (java.lang.reflect.Modifier.isStatic(modifiers)) { throw new IllegalArgumentException("Method [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to a non-static Java method."); + " within the struct [" + owner.name + "] is not linked to a non-static Java method."); } owner.methods.put(name, method); @@ -1421,31 +1747,31 @@ class Definition { if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for " + - (statik ? "static" : "member") + " [" + name + "]."); + (statik ? "static" : "member") + " [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { throw new IllegalArgumentException("Invalid " + (statik ? "static" : "member") + - " name [" + name + "] with the struct [" + owner.name + "]."); + " name [" + name + "] with the struct [" + owner.name + "]."); } if (owner.statics.containsKey(name)) { if (statik) { throw new IllegalArgumentException("Duplicate static name [" + name + "]" + - " found within the struct [" + owner.name + "]."); + " found within the struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Statics and members may not have the same name " + - "[" + name + "] within the same struct [" + owner.name + "]."); + "[" + name + "] within the same struct [" + owner.name + "]."); } } if (owner.members.containsKey(name)) { if (statik) { throw new IllegalArgumentException("Statics and members may not have the same name " + - "[" + name + "] within the same struct [" + owner.name + "]."); + "[" + name + "] within the same struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Duplicate member name [" + name + "]" + - " found within the struct [" + owner.name + "]."); + " found within the struct [" + owner.name + "]."); } } @@ -1454,8 +1780,8 @@ class Definition { generic.clazz.asSubclass(type.clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic type [" + generic.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" + - " [" + name + " ] from the struct [" + owner.name + "]."); + " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" + + " [" + name + " ] from the struct [" + owner.name + "]."); } } @@ -1465,7 +1791,7 @@ class Definition { reflect = owner.clazz.getField(alias == null ? name : alias); } catch (NoSuchFieldException exception) { throw new IllegalArgumentException("Field [" + (alias == null ? name : alias) + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } MethodHandle getter = null; @@ -1474,13 +1800,13 @@ class Definition { try { if (!statik) { getter = MethodHandles.publicLookup().in(owner.clazz).findGetter( - owner.clazz, alias == null ? name : alias, type.clazz); + owner.clazz, alias == null ? name : alias, type.clazz); setter = MethodHandles.publicLookup().in(owner.clazz).findSetter( - owner.clazz, alias == null ? name : alias, type.clazz); + owner.clazz, alias == null ? 
name : alias, type.clazz); } } catch (NoSuchFieldException | IllegalAccessException exception) { throw new IllegalArgumentException("Getter/Setter [" + (alias == null ? name : alias) + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } final Field field = new Field(name, owner, generic == null ? type : generic, type, reflect, getter, setter); @@ -1493,14 +1819,14 @@ class Definition { if (!java.lang.reflect.Modifier.isFinal(modifiers)) { throw new IllegalArgumentException("Static [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to static Java field."); + " within the struct [" + owner.name + "] is not linked to static Java field."); } owner.statics.put(alias == null ? name : alias, field); } else { if (java.lang.reflect.Modifier.isStatic(modifiers)) { throw new IllegalArgumentException("Member [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to non-static Java field."); + " within the struct [" + owner.name + "] is not linked to non-static Java field."); } owner.members.put(alias == null ? name : alias, field); @@ -1519,18 +1845,18 @@ class Definition { if (struct == null) { throw new IllegalArgumentException("Child struct [" + children[count] + "]" + - " not defined for copy to owner struct [" + owner.name + "]."); + " not defined for copy to owner struct [" + owner.name + "]."); } try { owner.clazz.asSubclass(child.clazz); } catch (ClassCastException exception) { throw new ClassCastException("Child struct [" + child.name + "]" + - " is not a super type of owner struct [" + owner.name + "] in copy."); + " is not a super type of owner struct [" + owner.name + "] in copy."); } final boolean object = child.clazz.equals(Object.class) && - java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers()); + java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers()); for (final Method method : child.methods.values()) { if (owner.methods.get(method.name) == null) { @@ -1543,22 +1869,22 @@ class Definition { reflect = clazz.getMethod(method.method.getName(), method.reflect.getParameterTypes()); } catch (NoSuchMethodException exception) { throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + - " class [" + owner.clazz.getName() + "] with arguments " + - Arrays.toString(method.reflect.getParameterTypes()) + "."); + " class [" + owner.clazz.getName() + "] with arguments " + + Arrays.toString(method.reflect.getParameterTypes()) + "."); } try { handle = MethodHandles.publicLookup().in(owner.clazz).findVirtual( - owner.clazz, method.method.getName(), - MethodType.methodType(method.reflect.getReturnType(), method.reflect.getParameterTypes())); + owner.clazz, method.method.getName(), + MethodType.methodType(method.reflect.getReturnType(), method.reflect.getParameterTypes())); } catch (NoSuchMethodException | IllegalAccessException exception) { throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + - " class [" + owner.clazz.getName() + "] with arguments " + - Arrays.toString(method.reflect.getParameterTypes()) + "."); + " class [" + owner.clazz.getName() + "] with arguments " + + Arrays.toString(method.reflect.getParameterTypes()) + "."); } owner.methods.put(method.name, - new Method(method.name, owner, method.rtn, method.arguments, method.method, reflect, handle)); + new Method(method.name, owner, method.rtn, method.arguments, method.method, reflect, handle)); } } @@ -1572,21 +1898,21 @@ 
class Definition { reflect = owner.clazz.getField(field.reflect.getName()); } catch (NoSuchFieldException exception) { throw new IllegalArgumentException("Field [" + field.reflect.getName() + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } try { getter = MethodHandles.publicLookup().in(owner.clazz).findGetter( - owner.clazz, field.name, field.type.clazz); + owner.clazz, field.name, field.type.clazz); setter = MethodHandles.publicLookup().in(owner.clazz).findSetter( - owner.clazz, field.name, field.type.clazz); + owner.clazz, field.name, field.type.clazz); } catch (NoSuchFieldException | IllegalAccessException exception) { throw new IllegalArgumentException("Getter/Setter [" + field.name + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } owner.members.put(field.name, - new Field(field.name, owner, field.type, field.generic, reflect, getter, setter)); + new Field(field.name, owner, field.type, field.generic, reflect, getter, setter)); } } } @@ -1598,19 +1924,19 @@ class Definition { if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for" + - " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "]."); + " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "]."); } if (from.equals(to)) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "] cannot" + - " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); + " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); } final Cast cast = new Cast(from, to); if (transforms.containsKey(cast)) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); } Method method; @@ -1622,14 +1948,14 @@ class Definition { if (method == null) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a function [" + name + "] that is not defined."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using a function [" + name + "] that is not defined."); } if (method.arguments.size() != 1) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using function [" + name + "] does not have a single type argument."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using function [" + name + "] does not have a single type argument."); } Type argument = method.arguments.get(0); @@ -1642,8 +1968,8 @@ class Definition { upcast = argument; } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast from type to the function input argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " function [" + name + "] cannot cast from type to the function input argument type."); } 
} @@ -1657,8 +1983,8 @@ class Definition { downcast = to; } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast to type to the function return argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " function [" + name + "] cannot cast to type to the function return argument type."); } } } else { @@ -1666,14 +1992,14 @@ class Definition { if (method == null) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a method [" + name + "] that is not defined."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using a method [" + name + "] that is not defined."); } if (!method.arguments.isEmpty()) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using method [" + name + "] does not have a single type argument."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using method [" + name + "] does not have a single type argument."); } try { @@ -1684,8 +2010,8 @@ class Definition { upcast = getType(owner.name); } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " method [" + name + "] cannot cast from type to the method input argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " method [" + name + "] cannot cast from type to the method input argument type."); } } @@ -1699,8 +2025,8 @@ class Definition { downcast = to; } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + - " using method [" + name + "] cannot cast to type to the method return argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + + " using method [" + name + "] cannot cast to type to the method return argument type."); } } } @@ -1715,12 +2041,12 @@ class Definition { if (bounds.containsKey(pair0)) { throw new IllegalArgumentException( - "Bound already defined for types [" + type0.name + "] and [" + type1.name + "]."); + "Bound already defined for types [" + type0.name + "] and [" + type1.name + "]."); } if (bounds.containsKey(pair1)) { throw new IllegalArgumentException( - "Bound already defined for types [" + type1.name + "] and [" + type0.name + "]."); + "Bound already defined for types [" + type1.name + "] and [" + type0.name + "]."); } bounds.put(pair0, bound); @@ -1763,7 +2089,7 @@ class Definition { clazz = Class.forName(type.getInternalName().replace('/', '.')); } catch (ClassNotFoundException exception) { throw new IllegalArgumentException("The class [" + type.getInternalName() + "]" + - " could not be found to create type [" + name + "]."); + " could not be found to create type [" + name + "]."); } sort = Sort.ARRAY; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java index 
4963815f470..768b0aea2ec 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plan.a; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,6 +17,8 @@ package org.elasticsearch.plan.a; * under the License. */ +package org.elasticsearch.plan.a; + import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.misc.Interval; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java new file mode 100644 index 00000000000..221b951497f --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java @@ -0,0 +1,619 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.plan.a.Definition.Cast; +import org.elasticsearch.plan.a.Definition.Type; +import org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; + +import java.util.HashMap; +import java.util.Map; + +/** + * Metadata is a wrapper for all the data that is collected by the {@link Analyzer}. Each node in the ANTLR parse tree + * will have one of the types of metadata to store information used either in a different node by the analyzer + * or by the {@link Writer} during byte code generation. Metadata also contains several objects passed into the + * {@link Analyzer} and {@link Writer} used during compilation including the {@link Definition}, the source code, + * the root of the ANTLR parse tree, and the {@link CompilerSettings}. + */ +class Metadata { + + /** + * StatementMetadata is used to store metadata mostly about + * control flow for ANTLR nodes related to if/else, do, while, for, etc. + */ + static class StatementMetadata { + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The lastSource variable will be set to true when the final statement from the root ANTLR node is about + * to be visited. This is used to determine whether or not the auto-return feature is allowed to be used, + * and if a null return value needs to be generated automatically since a return value is always required. + */ + boolean lastSource = false; + + /** + * The beginLoop variable will be set to true whenever a loop node is initially visited including inner + * loops. 
This will not be propagated down the parse tree afterwards, though. This is used to determine + * whether or not inLoop should be set further down the tree. Note that inLoop alone is not enough + * information to determine whether we are in the last statement of a loop because we may inside of + * multiple loops, so this variable is necessary. + */ + boolean beginLoop = false; + + /** + * The inLoop variable is set to true when inside a loop. This will be propagated down the parse tree. This + * is used to determine whether or not continue and break statements are legal. + */ + boolean inLoop = false; + + /** + * The lastLoop variable is set to true when the final statement of a loop is reached. This will be + * propagated down the parse tree until another loop is reached and then will not be propagated further for + * the current loop. This is used to determine whether or not a continue statement is superfluous. + */ + boolean lastLoop = false; + + /** + * The methodEscape variable is set to true when a statement would cause the method to potentially exit. This + * includes return, throw, and continuous loop statements. Note that a catch statement may possibly + * reset this to false after a throw statement. This will be propagated up the tree as far as necessary. + * This is used by the {@link Writer} to ensure that superfluous statements aren't unnecessarily written + * into generated bytecode. + */ + boolean methodEscape = false; + + /** + * The loopEscape variable is set to true when a loop is going to be exited. This may be caused by a number of + * different statements including continue, break, return, etc. This will only be propagated as far as the + * loop node. This is used to ensure that in certain case an infinite loop will be caught at + * compile-time rather than run-time. + */ + boolean loopEscape = false; + + /** + * The allLast variable is set whenever a final statement in a block is reached. This includes the end of loop, + * if, else, etc. This will be only propagated to the top of the block statement ANTLR node. + * This is used to ensure that there are no unreachable statements within the script. + */ + boolean allLast = false; + + /** + * The anyContinue will be set to true when a continue statement is visited. This will be propagated to the + * loop node it's within. This is used to ensure that in certain case an infinite loop will be caught at + * compile-time rather than run-time. + */ + boolean anyContinue = false; + + /** + * The anyBreak will be set to true when a break statement is visited. This will be propagated to the + * loop node it's within. This is used to in conjunction with methodEscape to ensure there are no unreachable + * statements within the script. + */ + boolean anyBreak = false; + + /** + * The count variable is used as a rudimentary count of statements within a loop. This will be used in + * the {@link Writer} to keep a count of statements that have been executed at run-time to ensure that a loop + * will exit if it runs too long. + */ + int count = 0; + + /** + * The exception variable is used to store the exception type when a throw node is visited. This is used by + * the {@link Writer} to write the correct type of exception in the generated byte code. + */ + Type exception = null; + + /** + * The slot variable is used to store the place on the stack of where a thrown exception will be stored to. + * This is used by the {@link Writer}. + */ + int slot = -1; + + /** + * Constructor. + * @param source The associated ANTLR node. 
+ */ + private StatementMetadata(final ParserRuleContext source) { + this.source = source; + } + } + + /** + * ExpressionMetadata is used to store metadata mostly about constants and casting + * for ANTLR nodes related to mathematical operations. + */ + static class ExpressionMetadata { + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The read variable is used to determine whether or not the value of an expression will be read from. + * This is set to false when the expression is the left-hand side of an assignment that is not chained or + * when a method call is made alone. This will propagate down the tree as far as necessary. + * The {@link Writer} uses this to determine when a value may need to be popped from the stack + * such as when a method call returns a value that is never read. + */ + boolean read = true; + + /** + * The statement variable is set to true when an expression is a complete statement, meaning that there is some sort + * of effect on a variable or a method call is made. This will propagate up the tree as far as necessary. + * This prevents statements that have no effect on the output of a script from being executed. + */ + boolean statement = false; + + /** + * The preConst variable is set to a non-null value when a constant statement is made in a script. This is + * used to track the constant value prior to any casts being made on an ANTLR node. + */ + Object preConst = null; + + /** + * The postConst variable is set to a non-null value when a cast is made on a node where a preConst variable + * has already been set when the cast would leave the constant as a non-object value except in the case of a + * String. This will be propagated up the tree and used to simplify constants when possible such as making + * the value of 2*2 be 4 in the * node, so that the {@link Writer} only has to push a 4 onto the stack. + */ + Object postConst = null; + + /** + * The isNull variable is set to true when a null constant statement is made in the script. This allows the + * {@link Writer} to potentially shortcut certain comparison operations. + */ + boolean isNull = false; + + /** + * The to variable is used to track what an ANTLR node's value should be cast to. This is set on every ANTLR + * node in the tree, and used by the {@link Writer} to make a run-time cast if necessary in the byte code. + * This is also used by the {@link Analyzer} to determine if a cast is legal. + */ + Type to = null; + + /** + * The from variable is used to track what an ANTLR node's value should be cast from. This is set on every + * ANTLR node in the tree independent of other nodes. This is used by the {@link Analyzer} to determine if a + * cast is legal. + */ + Type from = null; + + /** + * The explicit variable is set to true when a cast is explicitly made in the script. This tracks whether + * or not a cast is a legal up cast. + */ + boolean explicit = false; + + /** + * The typesafe variable is set to true when a dynamic type is used as part of an expression. This propagates + * up the tree to the top of the expression. This allows for implicit up casts throughout the expression and + * is used by the {@link Analyzer}. + */ + boolean typesafe = true; + + /** + * This is set to the combination of the to and from variables at the end of each node visit in the + * {@link Analyzer}.
This is set on every ANTLR node in the tree independent of other nodes, and is + * used by {@link Writer} to make a run-time cast if necessary in the byte code. + */ + Cast cast = null; + + /** + * Constructor. + * @param source The associated ANTLR node. + */ + private ExpressionMetadata(final ParserRuleContext source) { + this.source = source; + } + } + + /** + * ExternalMetadata is used to store metadata about the overall state of a variable/method chain such as + * '(int)x.get(3)' where each piece of that chain is broken into its individual pieces and stored in + * {@link ExtNodeMetadata}. + */ + static class ExternalMetadata { + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The read variable is set to true when the value of a variable/method chain is going to be read from. + * This is used by the {@link Analyzer} to determine if this variable/method chain will be in a standalone + * statement. + */ + boolean read = false; + + /** + * The storeExpr variable is set to the right-hand side of an assignment in the variable/method chain if + * necessary. This is used by the {@link Analyzer} to set the proper metadata for a read versus a write, + * and is used by the {@link Writer} to determine if a bytecode operation should be a load or a store. + */ + ParserRuleContext storeExpr = null; + + /** + * The token variable is set to a constant value of the operator type (+, -, etc.) when a compound assignment + * is being visited. This is also used by the increment and decrement operators. This is used by both the + * {@link Analyzer} and {@link Writer} to correctly handle the compound assignment. + */ + int token = 0; + + /** + * The pre variable is set to true when pre-increment or pre-decrement is visited. This is used by both the + * {@link Analyzer} and {@link Writer} to correctly handle any reads of the variable/method chain that are + * necessary. + */ + boolean pre = false; + + /** + * The post variable is set to true when post-increment or post-decrement is visited. This is used by both the + * {@link Analyzer} and {@link Writer} to correctly handle any reads of the variable/method chain that are + * necessary. + */ + boolean post = false; + + /** + * The scope variable is incremented and decremented when a precedence node is visited as part of a + * variable/method chain. This is used by the {@link Analyzer} to determine when the final piece of the + * variable/method chain has been reached. + */ + int scope = 0; + + /** + * The current variable is set to whatever the current type is within the visited node of the variable/method + * chain. This changes as the nodes for the variable/method are walked through. This is used by the + * {@link Analyzer} to make decisions about whether or not a cast is legal, and what methods are available + * for that specific type. + */ + Type current = null; + + /** + * The statik variable is set to true when a variable/method chain begins with a static type. This is used by + * the {@link Analyzer} to determine what methods/members are available for that specific type. + */ + boolean statik = false; + + /** + * The statement variable is set to true when a variable/method chain can be a standalone statement. This is + * used by the {@link Analyzer} to error out if there is a variable/method chain that is not a statement. + */ + boolean statement = false; + + /** + * The constant variable is set when a String constant is part of the variable/method chain.
String is a + * special case because methods/members need to be able to be called on a String constant, so this can't be + * stored only as part of {@link ExpressionMetadata}. This is used by the {@link Writer} to write out the String + * constant in the byte code. + */ + Object constant = null; + + /** + * Constructor. + * @param source The associated ANTLR node. + */ + private ExternalMetadata(final ParserRuleContext source) { + this.source = source; + } + } + + static class ExtNodeMetadata { + /** + * The parent variable is the top-level ANTLR node of the variable/method chain. This is used to retrieve the + * ExternalMetadata for the variable/method chain this ExtNodeMetadata is a piece of. + */ + final ParserRuleContext parent; + + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The target variable is set to a value based on the type of ANTLR node that is visited. This is used by + * {@link Writer} to determine whether a cast, store, load, or method call should be written in byte code + * depending on what the target variable is. + */ + Object target = null; + + /** + * The last variable is set to true when the last ANTLR node of the variable/method chain is visited. This is + * used by the {@link Writer} in conjunction with the storeExpr variable to determine whether or not a store + * needs to be written as opposed to a load. + */ + boolean last = false; + + /** + * The type variable is set to the type that a visited node ends with. This is used by both the + * {@link Analyzer} and {@link Writer} to make decisions about compound assignments, String constants, and + * shortcuts. + */ + Type type = null; + + /** + * The promote variable is set to the type of a promotion within a compound assignment. Compound assignments + * may require promotion between the left-hand side variable and right-hand side value. This is used by the + * {@link Writer} to make the correct decision about the byte code operation. + */ + Type promote = null; + + /** + * The castFrom variable is set during a compound assignment. This is used by the {@link Writer} to + * cast the values to the promoted type during a compound assignment. + */ + Cast castFrom = null; + + /** + * The castTo variable is set during an explicit cast in a variable/method chain or during a compound + * assignment. This is used by the {@link Writer} to either do an explicit cast, or cast the values + * from the promoted type back to the original type during a compound assignment. + */ + Cast castTo = null; + + /** + * Constructor. + * @param parent The top-level ANTLR node for the variable/method chain. + * @param source The associated ANTLR node. + */ + private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { + this.parent = parent; + this.source = source; + } + } + + /** + * A utility method to output consistent error messages. + * @param ctx The ANTLR node the error occurred in. + * @return The error message with the line number and character position tacked on. + */ + static String error(final ParserRuleContext ctx) { + return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; + } + + /** + * Acts as both the Plan A API and white-list for what types and methods are allowed. + */ + final Definition definition; + + /** + * The original text of the input script. This is used to write out the source code into + * the byte code file for debugging purposes.
+ */ + final String source; + + /** + * Root node of the ANTLR tree for the Plan A script. + */ + final ParserRuleContext root; + + /** + * Used to determine certain compile-time constraints such as whether or not numeric overflow is allowed + * and how many statements are allowed before a loop will throw an exception. + */ + final CompilerSettings settings; + + /** + * Used to determine what slot the input variable is stored in. This is used in the {@link Writer} whenever + * the input variable is accessed. + */ + int inputValueSlot = -1; + + /** + * Used to determine what slot the score variable is stored in. This is used in the {@link Writer} whenever + * the score variable is accessed. + */ + int scoreValueSlot = -1; + + /** + * Used to determine what slot the loopCounter variable is stored in. This is used in the {@link Writer} whenever + * the loop counter variable is accessed. + */ + int loopCounterSlot = -1; + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map<ParserRuleContext, StatementMetadata> statementMetadata = new HashMap<>(); + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map<ParserRuleContext, ExpressionMetadata> expressionMetadata = new HashMap<>(); + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map<ParserRuleContext, ExternalMetadata> externalMetadata = new HashMap<>(); + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map<ParserRuleContext, ExtNodeMetadata> extNodeMetadata = new HashMap<>(); + + /** + * Constructor. + * @param definition The Plan A definition. + * @param source The source text for the script. + * @param root The root ANTLR node. + * @param settings The compile-time settings. + */ + Metadata(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) { + this.definition = definition; + this.source = source; + this.root = root; + this.settings = settings; + } + + /** + * Creates a new StatementMetadata and stores it in the statementMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new StatementMetadata. + */ + StatementMetadata createStatementMetadata(final ParserRuleContext source) { + final StatementMetadata sourcesmd = new StatementMetadata(source); + statementMetadata.put(source, sourcesmd); + + return sourcesmd; + } + + /** + * Retrieves StatementMetadata from the statementMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved StatementMetadata. + */ + StatementMetadata getStatementMetadata(final ParserRuleContext source) { + final StatementMetadata sourcesmd = statementMetadata.get(source); + + if (sourcesmd == null) { + throw new IllegalStateException(error(source) + "Statement metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourcesmd; + } + + /** + * The ANTLR parse tree is modified in a single case: a parent node needs to check a child node to see if it's + * a precedence node, and if so, the child must be removed from the tree permanently. Once the ANTLR tree is built, + * precedence nodes are no longer necessary to maintain the correct ordering of the tree, so they only + * add a level of indirection where complicated decisions about metadata passing would have to be made. This + * method removes the need for those decisions. + * @param source The child ANTLR node to check for precedence. + * @return The updated child ANTLR node. + */ + ExpressionContext updateExpressionTree(ExpressionContext source) { + // Check to see if the ANTLR node is a precedence node. 
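+ // For example, in '(a + b) * c' the subexpression '(a + b)' parses to a PrecedenceContext that + // exists only to encode ordering; the code below splices it out of the parent's child list and + // replaces it with the inner expression, so metadata can be passed directly to the child.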
+ if (source instanceof PrecedenceContext) { + final ParserRuleContext parent = source.getParent(); + int index = 0; + + // Mark the index of the source node within the list of child nodes from the parent. + for (final ParseTree child : parent.children) { + if (child == source) { + break; + } + + ++index; + } + + // If there are multiple precedence nodes in a row, remove them all. + while (source instanceof PrecedenceContext) { + source = ((PrecedenceContext)source).expression(); + } + + // Update the parent node with the child of the precedence node. + parent.children.set(index, source); + } + + return source; + } + + /** + * Creates a new ExpressionMetadata and stores it in the expressionMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new ExpressionMetadata. + */ + ExpressionMetadata createExpressionMetadata(ParserRuleContext source) { + final ExpressionMetadata sourceemd = new ExpressionMetadata(source); + expressionMetadata.put(source, sourceemd); + + return sourceemd; + } + + /** + * Retrieves ExpressionMetadata from the expressionMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved ExpressionMetadata. + */ + ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { + final ExpressionMetadata sourceemd = expressionMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "Expression metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } + + /** + * Creates a new ExternalMetadata and stores it in the externalMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new ExternalMetadata. + */ + ExternalMetadata createExternalMetadata(final ParserRuleContext source) { + final ExternalMetadata sourceemd = new ExternalMetadata(source); + externalMetadata.put(source, sourceemd); + + return sourceemd; + } + + /** + * Retrieves ExternalMetadata from the externalMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved ExternalMetadata. + */ + ExternalMetadata getExternalMetadata(final ParserRuleContext source) { + final ExternalMetadata sourceemd = externalMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "External metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } + + /** + * Creates a new ExtNodeMetadata and stores it in the extNodeMetadata map. + * @param parent The top-level ANTLR node for the variable/method chain. + * @param source The ANTLR node for this metadata. + * @return The new ExtNodeMetadata. + */ + ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { + final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source); + extNodeMetadata.put(source, sourceemd); + + return sourceemd; + } + + /** + * Retrieves ExtNodeMetadata from the extNodeMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved ExtNodeMetadata. 
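+ * @throws IllegalStateException If no metadata exists for the given ANTLR node.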
+ */ + ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) { + final ExtNodeMetadata sourceemd = extNodeMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "External node metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } +} diff --git a/core/src/test/java/org/elasticsearch/recovery/SmallFileChunkSizeRecoveryIT.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java similarity index 54% rename from core/src/test/java/org/elasticsearch/recovery/SmallFileChunkSizeRecoveryIT.java rename to plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java index aba0f38af85..dea3c2021a8 100644 --- a/core/src/test/java/org/elasticsearch/recovery/SmallFileChunkSizeRecoveryIT.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java @@ -17,17 +17,21 @@ * under the License. */ -package org.elasticsearch.recovery; - -import org.elasticsearch.common.settings.Settings; +package org.elasticsearch.plan.a; /** - * + * The PlanAError class is used to throw internal errors caused by Plan A scripts that cannot be + * caught using a standard {@link Exception}. This prevents the user from catching this specific error + * (as Exceptions are available in the Plan A API, but Errors are not) and possibly continuing to do + * something hazardous. The alternative was extending {@link Throwable}, but that seemed worse than using + * an {@link Error} in this case. */ -public class SmallFileChunkSizeRecoveryIT extends SimpleRecoveryIT { - - @Override - protected Settings recoverySettings() { - return Settings.settingsBuilder().put("index.shard.recovery.file_chunk_size", "3b").build(); +public class PlanAError extends Error { + /** + * Constructor. + * @param message The error message. 
+ */ + public PlanAError(final String message) { + super(message); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java index da9943385c0..e7de571ef2e 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java @@ -40,17 +40,17 @@ class PlanAParser extends Parser { STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, EXTID=76; public static final int - RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, - RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, - RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, - RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, - RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, - RULE_arguments = 21, RULE_increment = 22; + RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_emptyscope = 4, + RULE_initializer = 5, RULE_afterthought = 6, RULE_declaration = 7, RULE_decltype = 8, + RULE_declvar = 9, RULE_trap = 10, RULE_expression = 11, RULE_extstart = 12, + RULE_extprec = 13, RULE_extcast = 14, RULE_extbrace = 15, RULE_extdot = 16, + RULE_exttype = 17, RULE_extcall = 18, RULE_extvar = 19, RULE_extfield = 20, + RULE_extnew = 21, RULE_extstring = 22, RULE_arguments = 23, RULE_increment = 24; public static final String[] ruleNames = { - "source", "statement", "block", "empty", "initializer", "afterthought", - "declaration", "decltype", "declvar", "expression", "extstart", "extprec", - "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", - "extnew", "extstring", "arguments", "increment" + "source", "statement", "block", "empty", "emptyscope", "initializer", + "afterthought", "declaration", "decltype", "declvar", "trap", "expression", + "extstart", "extprec", "extcast", "extbrace", "extdot", "exttype", "extcall", + "extvar", "extfield", "extnew", "extstring", "arguments", "increment" }; private static final String[] _LITERAL_NAMES = { @@ -149,21 +149,21 @@ class PlanAParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(47); + setState(51); _errHandler.sync(this); _la = _input.LA(1); do { { { - setState(46); + setState(50); statement(); } } - setState(49); + setState(53); _errHandler.sync(this); _la = _input.LA(1); } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); - setState(51); + setState(55); match(EOF); } } @@ -266,31 +266,14 @@ class PlanAParser extends Parser { } public static class TryContext extends StatementContext { public TerminalNode TRY() { return getToken(PlanAParser.TRY, 0); } - public List block() { - return getRuleContexts(BlockContext.class); + public BlockContext block() { + return 
getRuleContext(BlockContext.class,0); } - public BlockContext block(int i) { - return getRuleContext(BlockContext.class,i); + public List trap() { + return getRuleContexts(TrapContext.class); } - public List CATCH() { return getTokens(PlanAParser.CATCH); } - public TerminalNode CATCH(int i) { - return getToken(PlanAParser.CATCH, i); - } - public List LP() { return getTokens(PlanAParser.LP); } - public TerminalNode LP(int i) { - return getToken(PlanAParser.LP, i); - } - public List RP() { return getTokens(PlanAParser.RP); } - public TerminalNode RP(int i) { - return getToken(PlanAParser.RP, i); - } - public List TYPE() { return getTokens(PlanAParser.TYPE); } - public TerminalNode TYPE(int i) { - return getToken(PlanAParser.TYPE, i); - } - public List ID() { return getTokens(PlanAParser.ID); } - public TerminalNode ID(int i) { - return getToken(PlanAParser.ID, i); + public TrapContext trap(int i) { + return getRuleContext(TrapContext.class,i); } public TryContext(StatementContext ctx) { copyFrom(ctx); } @Override @@ -391,29 +374,29 @@ class PlanAParser extends Parser { int _la; try { int _alt; - setState(136); + setState(134); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: _localctx = new IfContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(53); - match(IF); - setState(54); - match(LP); - setState(55); - expression(0); - setState(56); - match(RP); setState(57); - block(); + match(IF); + setState(58); + match(LP); + setState(59); + expression(0); setState(60); + match(RP); + setState(61); + block(); + setState(64); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: { - setState(58); + setState(62); match(ELSE); - setState(59); + setState(63); block(); } break; @@ -424,58 +407,28 @@ class PlanAParser extends Parser { _localctx = new WhileContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(62); + setState(66); match(WHILE); - setState(63); + setState(67); match(LP); - setState(64); - expression(0); - setState(65); - match(RP); setState(68); - switch (_input.LA(1)) { - case LBRACK: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case CHAR: - case TRUE: - case FALSE: - case NULL: - case TYPE: - case ID: + expression(0); + setState(69); + match(RP); + setState(72); + switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { + case 1: { - setState(66); + setState(70); block(); } break; - case SEMICOLON: + case 2: { - setState(67); + setState(71); empty(); } break; - default: - throw new NoViableAltException(this); } } break; @@ -483,23 +436,23 @@ class PlanAParser extends Parser { _localctx = new DoContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(70); - match(DO); - setState(71); - block(); - setState(72); - match(WHILE); - setState(73); - match(LP); setState(74); - expression(0); + match(DO); setState(75); - match(RP); + block(); + setState(76); + match(WHILE); setState(77); + match(LP); + setState(78); + expression(0); + setState(79); + match(RP); + setState(81); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(76); + setState(80); match(SEMICOLON); } } @@ -510,27 +463,16 @@ class PlanAParser extends Parser { _localctx = new ForContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(79); + setState(83); match(FOR); - setState(80); - match(LP); - setState(82); - 
_la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { - { - setState(81); - initializer(); - } - } - setState(84); - match(SEMICOLON); + match(LP); setState(86); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { setState(85); - expression(0); + initializer(); } } @@ -541,56 +483,37 @@ class PlanAParser extends Parser { if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { setState(89); - afterthought(); + expression(0); } } setState(92); - match(RP); - setState(95); - switch (_input.LA(1)) { - case LBRACK: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case CHAR: - case TRUE: - case FALSE: - case NULL: - case TYPE: - case ID: + match(SEMICOLON); + setState(94); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { setState(93); + afterthought(); + } + } + + setState(96); + match(RP); + setState(99); + switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { + case 1: + { + setState(97); block(); } break; - case SEMICOLON: + case 2: { - setState(94); + setState(98); empty(); } break; - default: - throw new NoViableAltException(this); } } break; @@ -598,25 +521,8 @@ class PlanAParser extends Parser { _localctx = new DeclContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(97); - declaration(); - setState(99); - _la = _input.LA(1); - if (_la==SEMICOLON) { - { - setState(98); - match(SEMICOLON); - } - } - - } - break; - case 6: - _localctx = new ContinueContext(_localctx); - enterOuterAlt(_localctx, 6); - { setState(101); - match(CONTINUE); + declaration(); 
setState(103); _la = _input.LA(1); if (_la==SEMICOLON) { @@ -628,12 +534,12 @@ class PlanAParser extends Parser { } break; - case 7: - _localctx = new BreakContext(_localctx); - enterOuterAlt(_localctx, 7); + case 6: + _localctx = new ContinueContext(_localctx); + enterOuterAlt(_localctx, 6); { setState(105); - match(BREAK); + match(CONTINUE); setState(107); _la = _input.LA(1); if (_la==SEMICOLON) { @@ -643,21 +549,38 @@ class PlanAParser extends Parser { } } + } + break; + case 7: + _localctx = new BreakContext(_localctx); + enterOuterAlt(_localctx, 7); + { + setState(109); + match(BREAK); + setState(111); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(110); + match(SEMICOLON); + } + } + } break; case 8: _localctx = new ReturnContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(109); + setState(113); match(RETURN); - setState(110); + setState(114); expression(0); - setState(112); + setState(116); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(111); + setState(115); match(SEMICOLON); } } @@ -668,11 +591,11 @@ class PlanAParser extends Parser { _localctx = new TryContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(114); + setState(118); match(TRY); - setState(115); + setState(119); block(); - setState(123); + setState(121); _errHandler.sync(this); _alt = 1; do { @@ -680,27 +603,15 @@ class PlanAParser extends Parser { case 1: { { - setState(116); - match(CATCH); - setState(117); - match(LP); - { - setState(118); - match(TYPE); - setState(119); - match(ID); - } - setState(121); - match(RP); - setState(122); - block(); + setState(120); + trap(); } } break; default: throw new NoViableAltException(this); } - setState(125); + setState(123); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,12,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -710,15 +621,15 @@ class PlanAParser extends Parser { _localctx = new ThrowContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(127); + setState(125); match(THROW); - setState(128); + setState(126); expression(0); - setState(130); + setState(128); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(129); + setState(127); match(SEMICOLON); } } @@ -729,13 +640,13 @@ class PlanAParser extends Parser { _localctx = new ExprContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(132); + setState(130); expression(0); - setState(134); + setState(132); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(133); + setState(131); match(SEMICOLON); } } @@ -799,29 +710,29 @@ class PlanAParser extends Parser { enterRule(_localctx, 4, RULE_block); int _la; try { - setState(147); + setState(145); switch (_input.LA(1)) { case LBRACK: _localctx = new MultipleContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(138); + setState(136); match(LBRACK); - setState(142); + setState(138); _errHandler.sync(this); _la = _input.LA(1); - while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + do { 
{ { - setState(139); + setState(137); statement(); } } - setState(144); + setState(140); _errHandler.sync(this); _la = _input.LA(1); - } - setState(145); + } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); + setState(142); match(RBRACK); } break; @@ -856,7 +767,7 @@ class PlanAParser extends Parser { _localctx = new SingleContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(146); + setState(144); statement(); } break; @@ -876,6 +787,9 @@ class PlanAParser extends Parser { } public static class EmptyContext extends ParserRuleContext { + public EmptyscopeContext emptyscope() { + return getRuleContext(EmptyscopeContext.class,0); + } public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } public EmptyContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -891,11 +805,62 @@ class PlanAParser extends Parser { public final EmptyContext empty() throws RecognitionException { EmptyContext _localctx = new EmptyContext(_ctx, getState()); enterRule(_localctx, 6, RULE_empty); + try { + setState(149); + switch (_input.LA(1)) { + case LBRACK: + enterOuterAlt(_localctx, 1); + { + setState(147); + emptyscope(); + } + break; + case SEMICOLON: + enterOuterAlt(_localctx, 2); + { + setState(148); + match(SEMICOLON); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class EmptyscopeContext extends ParserRuleContext { + public TerminalNode LBRACK() { return getToken(PlanAParser.LBRACK, 0); } + public TerminalNode RBRACK() { return getToken(PlanAParser.RBRACK, 0); } + public EmptyscopeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_emptyscope; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitEmptyscope(this); + else return visitor.visitChildren(this); + } + } + + public final EmptyscopeContext emptyscope() throws RecognitionException { + EmptyscopeContext _localctx = new EmptyscopeContext(_ctx, getState()); + enterRule(_localctx, 8, RULE_emptyscope); try { enterOuterAlt(_localctx, 1); { - setState(149); - match(SEMICOLON); + setState(151); + match(LBRACK); + setState(152); + match(RBRACK); } } catch (RecognitionException re) { @@ -929,21 +894,21 @@ class PlanAParser extends Parser { public final InitializerContext initializer() throws RecognitionException { InitializerContext _localctx = new InitializerContext(_ctx, getState()); - enterRule(_localctx, 8, RULE_initializer); + enterRule(_localctx, 10, RULE_initializer); try { - setState(153); - switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) { + setState(156); + switch ( 
getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(151); + setState(154); declaration(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(152); + setState(155); expression(0); } break; @@ -977,11 +942,11 @@ class PlanAParser extends Parser { public final AfterthoughtContext afterthought() throws RecognitionException { AfterthoughtContext _localctx = new AfterthoughtContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_afterthought); + enterRule(_localctx, 12, RULE_afterthought); try { enterOuterAlt(_localctx, 1); { - setState(155); + setState(158); expression(0); } } @@ -1023,28 +988,28 @@ class PlanAParser extends Parser { public final DeclarationContext declaration() throws RecognitionException { DeclarationContext _localctx = new DeclarationContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_declaration); + enterRule(_localctx, 14, RULE_declaration); int _la; try { enterOuterAlt(_localctx, 1); { - setState(157); + setState(160); decltype(); - setState(158); + setState(161); declvar(); - setState(163); + setState(166); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(159); + setState(162); match(COMMA); - setState(160); + setState(163); declvar(); } } - setState(165); + setState(168); _errHandler.sync(this); _la = _input.LA(1); } @@ -1084,26 +1049,26 @@ class PlanAParser extends Parser { public final DecltypeContext decltype() throws RecognitionException { DecltypeContext _localctx = new DecltypeContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_decltype); + enterRule(_localctx, 16, RULE_decltype); int _la; try { enterOuterAlt(_localctx, 1); { - setState(166); + setState(169); match(TYPE); - setState(171); + setState(174); _errHandler.sync(this); _la = _input.LA(1); while (_la==LBRACE) { { { - setState(167); + setState(170); match(LBRACE); - setState(168); + setState(171); match(RBRACE); } } - setState(173); + setState(176); _errHandler.sync(this); _la = _input.LA(1); } @@ -1139,20 +1104,20 @@ class PlanAParser extends Parser { public final DeclvarContext declvar() throws RecognitionException { DeclvarContext _localctx = new DeclvarContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_declvar); + enterRule(_localctx, 18, RULE_declvar); int _la; try { enterOuterAlt(_localctx, 1); { - setState(174); - match(ID); setState(177); + match(ID); + setState(180); _la = _input.LA(1); if (_la==ASSIGN) { { - setState(175); + setState(178); match(ASSIGN); - setState(176); + setState(179); expression(0); } } @@ -1170,6 +1135,75 @@ class PlanAParser extends Parser { return _localctx; } + public static class TrapContext extends ParserRuleContext { + public TerminalNode CATCH() { return getToken(PlanAParser.CATCH, 0); } + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } + public BlockContext block() { + return getRuleContext(BlockContext.class,0); + } + public EmptyscopeContext emptyscope() { + return getRuleContext(EmptyscopeContext.class,0); + } + public TrapContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_trap; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTrap(this); + else 
return visitor.visitChildren(this); + } + } + + public final TrapContext trap() throws RecognitionException { + TrapContext _localctx = new TrapContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_trap); + try { + enterOuterAlt(_localctx, 1); + { + setState(182); + match(CATCH); + setState(183); + match(LP); + { + setState(184); + match(TYPE); + setState(185); + match(ID); + } + setState(187); + match(RP); + setState(190); + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + case 1: + { + setState(188); + block(); + } + break; + case 2: + { + setState(189); + emptyscope(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class ExpressionContext extends ParserRuleContext { public ExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -1427,29 +1461,29 @@ class PlanAParser extends Parser { int _parentState = getState(); ExpressionContext _localctx = new ExpressionContext(_ctx, _parentState); ExpressionContext _prevctx = _localctx; - int _startState = 18; - enterRecursionRule(_localctx, 18, RULE_expression, _p); + int _startState = 22; + enterRecursionRule(_localctx, 22, RULE_expression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(207); - switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { + setState(220); + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { _localctx = new UnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(180); + setState(193); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(181); + setState(194); expression(14); } break; @@ -1458,13 +1492,13 @@ class PlanAParser extends Parser { _localctx = new CastContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(195); match(LP); - setState(183); + setState(196); decltype(); - setState(184); + setState(197); match(RP); - setState(185); + setState(198); expression(13); } break; @@ -1473,16 +1507,16 @@ class PlanAParser extends Parser { _localctx = new AssignmentContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(187); + setState(200); extstart(); - setState(188); + setState(201); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ASSIGN) | (1L << AADD) | (1L << ASUB) | (1L << AMUL) | (1L << ADIV) | (1L << AREM) | (1L << AAND) | (1L << AXOR) | (1L << AOR) | (1L << ALSH) | (1L << ARSH) | (1L << AUSH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(189); + setState(202); expression(1); } break; @@ -1491,11 +1525,11 @@ class PlanAParser extends Parser { _localctx = new PrecedenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(191); + setState(204); match(LP); - setState(192); + setState(205); expression(0); - setState(193); + setState(206); match(RP); } break; @@ -1504,7 +1538,7 @@ class PlanAParser extends Parser { _localctx = new NumericContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(195); + setState(208); _la = _input.LA(1); if ( !(((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)))) != 0)) ) { 
_errHandler.recoverInline(this); @@ -1518,7 +1552,7 @@ class PlanAParser extends Parser { _localctx = new CharContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(196); + setState(209); match(CHAR); } break; @@ -1527,7 +1561,7 @@ class PlanAParser extends Parser { _localctx = new TrueContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(197); + setState(210); match(TRUE); } break; @@ -1536,7 +1570,7 @@ class PlanAParser extends Parser { _localctx = new FalseContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(211); match(FALSE); } break; @@ -1545,7 +1579,7 @@ class PlanAParser extends Parser { _localctx = new NullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(199); + setState(212); match(NULL); } break; @@ -1554,9 +1588,9 @@ class PlanAParser extends Parser { _localctx = new PostincContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(200); + setState(213); extstart(); - setState(201); + setState(214); increment(); } break; @@ -1565,9 +1599,9 @@ class PlanAParser extends Parser { _localctx = new PreincContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(203); + setState(216); increment(); - setState(204); + setState(217); extstart(); } break; @@ -1576,36 +1610,36 @@ class PlanAParser extends Parser { _localctx = new ExternalContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(206); + setState(219); extstart(); } break; } _ctx.stop = _input.LT(-1); - setState(247); + setState(260); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(245); - switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + setState(258); + switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(209); + setState(222); if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(210); + setState(223); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(211); + setState(224); expression(13); } break; @@ -1613,16 +1647,16 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(212); + setState(225); if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(213); + setState(226); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(214); + setState(227); expression(12); } break; @@ -1630,16 +1664,16 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(215); + setState(228); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(216); + setState(229); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) 
| (1L << RSH) | (1L << USH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(217); + setState(230); expression(11); } break; @@ -1647,16 +1681,16 @@ class PlanAParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(218); + setState(231); if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(219); + setState(232); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(220); + setState(233); expression(10); } break; @@ -1664,16 +1698,16 @@ class PlanAParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(221); + setState(234); if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(222); + setState(235); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << NER))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(223); + setState(236); expression(9); } break; @@ -1681,11 +1715,11 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(224); + setState(237); if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(225); + setState(238); match(BWAND); - setState(226); + setState(239); expression(8); } break; @@ -1693,11 +1727,11 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(227); + setState(240); if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(228); + setState(241); match(BWXOR); - setState(229); + setState(242); expression(7); } break; @@ -1705,11 +1739,11 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(230); + setState(243); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(231); + setState(244); match(BWOR); - setState(232); + setState(245); expression(6); } break; @@ -1717,11 +1751,11 @@ class PlanAParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(233); + setState(246); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(234); + setState(247); match(BOOLAND); - setState(235); + setState(248); expression(5); } break; @@ -1729,11 +1763,11 @@ class PlanAParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(236); + setState(249); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(237); + setState(250); match(BOOLOR); - setState(238); + setState(251); expression(4); } break; @@ 
-1741,24 +1775,24 @@ class PlanAParser extends Parser { { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(239); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(240); + setState(253); match(COND); - setState(241); + setState(254); expression(0); - setState(242); + setState(255); match(COLON); - setState(243); + setState(256); expression(2); } break; } } } - setState(249); + setState(262); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } } } @@ -1805,49 +1839,49 @@ class PlanAParser extends Parser { public final ExtstartContext extstart() throws RecognitionException { ExtstartContext _localctx = new ExtstartContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_extstart); + enterRule(_localctx, 24, RULE_extstart); try { - setState(256); - switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + setState(269); + switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(250); + setState(263); extprec(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(251); + setState(264); extcast(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(252); + setState(265); exttype(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(253); + setState(266); extvar(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(254); + setState(267); extnew(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(255); + setState(268); extstring(); } break; @@ -1904,64 +1938,64 @@ class PlanAParser extends Parser { public final ExtprecContext extprec() throws RecognitionException { ExtprecContext _localctx = new ExtprecContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_extprec); + enterRule(_localctx, 26, RULE_extprec); try { enterOuterAlt(_localctx, 1); { - setState(258); + setState(271); match(LP); - setState(265); - switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { + setState(278); + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(259); + setState(272); extprec(); } break; case 2: { - setState(260); + setState(273); extcast(); } break; case 3: { - setState(261); + setState(274); exttype(); } break; case 4: { - setState(262); + setState(275); extvar(); } break; case 5: { - setState(263); + setState(276); extnew(); } break; case 6: { - setState(264); + setState(277); extstring(); } break; } - setState(267); + setState(280); match(RP); - setState(270); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + setState(283); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(268); + setState(281); extdot(); } break; case 2: { - setState(269); + setState(282); extbrace(); } break; @@ -2016,51 +2050,51 @@ class PlanAParser extends Parser { public final ExtcastContext extcast() throws RecognitionException { ExtcastContext _localctx = new ExtcastContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_extcast); + enterRule(_localctx, 28, RULE_extcast); try { enterOuterAlt(_localctx, 1); { - setState(272); + setState(285); match(LP); - setState(273); + setState(286); decltype(); - setState(274); + setState(287); match(RP); - setState(281); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + setState(294); + switch ( 
getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(275); + setState(288); extprec(); } break; case 2: { - setState(276); + setState(289); extcast(); } break; case 3: { - setState(277); + setState(290); exttype(); } break; case 4: { - setState(278); + setState(291); extvar(); } break; case 5: { - setState(279); + setState(292); extnew(); } break; case 6: { - setState(280); + setState(293); extstring(); } break; @@ -2103,27 +2137,27 @@ class PlanAParser extends Parser { public final ExtbraceContext extbrace() throws RecognitionException { ExtbraceContext _localctx = new ExtbraceContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_extbrace); + enterRule(_localctx, 30, RULE_extbrace); try { enterOuterAlt(_localctx, 1); { - setState(283); + setState(296); match(LBRACE); - setState(284); + setState(297); expression(0); - setState(285); + setState(298); match(RBRACE); - setState(288); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + setState(301); + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(286); + setState(299); extdot(); } break; case 2: { - setState(287); + setState(300); extbrace(); } break; @@ -2162,23 +2196,23 @@ class PlanAParser extends Parser { public final ExtdotContext extdot() throws RecognitionException { ExtdotContext _localctx = new ExtdotContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_extdot); + enterRule(_localctx, 32, RULE_extdot); try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(303); match(DOT); - setState(293); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + setState(306); + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(291); + setState(304); extcall(); } break; case 2: { - setState(292); + setState(305); extfield(); } break; @@ -2214,13 +2248,13 @@ class PlanAParser extends Parser { public final ExttypeContext exttype() throws RecognitionException { ExttypeContext _localctx = new ExttypeContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_exttype); + enterRule(_localctx, 34, RULE_exttype); try { enterOuterAlt(_localctx, 1); { - setState(295); + setState(308); match(TYPE); - setState(296); + setState(309); extdot(); } } @@ -2259,25 +2293,25 @@ class PlanAParser extends Parser { public final ExtcallContext extcall() throws RecognitionException { ExtcallContext _localctx = new ExtcallContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_extcall); + enterRule(_localctx, 36, RULE_extcall); try { enterOuterAlt(_localctx, 1); { - setState(298); + setState(311); match(EXTID); - setState(299); + setState(312); arguments(); - setState(302); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + setState(315); + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(300); + setState(313); extdot(); } break; case 2: { - setState(301); + setState(314); extbrace(); } break; @@ -2316,23 +2350,23 @@ class PlanAParser extends Parser { public final ExtvarContext extvar() throws RecognitionException { ExtvarContext _localctx = new ExtvarContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_extvar); + enterRule(_localctx, 38, RULE_extvar); try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(317); match(ID); - setState(307); - switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { + setState(320); + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(305); + setState(318); extdot(); } break; case 2: { - setState(306); + 
setState(319); extbrace(); } break; @@ -2372,29 +2406,29 @@ class PlanAParser extends Parser { public final ExtfieldContext extfield() throws RecognitionException { ExtfieldContext _localctx = new ExtfieldContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_extfield); + enterRule(_localctx, 40, RULE_extfield); int _la; try { enterOuterAlt(_localctx, 1); { - setState(309); + setState(322); _la = _input.LA(1); if ( !(_la==EXTINTEGER || _la==EXTID) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(312); - switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { + setState(325); + switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(310); + setState(323); extdot(); } break; case 2: { - setState(311); + setState(324); extbrace(); } break; @@ -2451,33 +2485,33 @@ class PlanAParser extends Parser { public final ExtnewContext extnew() throws RecognitionException { ExtnewContext _localctx = new ExtnewContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_extnew); + enterRule(_localctx, 42, RULE_extnew); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(314); + setState(327); match(NEW); - setState(315); + setState(328); match(TYPE); - setState(332); + setState(345); switch (_input.LA(1)) { case LP: { { - setState(316); + setState(329); arguments(); - setState(319); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + setState(332); + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(317); + setState(330); extdot(); } break; case 2: { - setState(318); + setState(331); extbrace(); } break; @@ -2488,7 +2522,7 @@ class PlanAParser extends Parser { case LBRACE: { { - setState(325); + setState(338); _errHandler.sync(this); _alt = 1; do { @@ -2496,11 +2530,11 @@ class PlanAParser extends Parser { case 1: { { - setState(321); + setState(334); match(LBRACE); - setState(322); + setState(335); expression(0); - setState(323); + setState(336); match(RBRACE); } } @@ -2508,15 +2542,15 @@ class PlanAParser extends Parser { default: throw new NoViableAltException(this); } - setState(327); + setState(340); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(330); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + setState(343); + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: { - setState(329); + setState(342); extdot(); } break; @@ -2561,23 +2595,23 @@ class PlanAParser extends Parser { public final ExtstringContext extstring() throws RecognitionException { ExtstringContext _localctx = new ExtstringContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_extstring); + enterRule(_localctx, 44, RULE_extstring); try { enterOuterAlt(_localctx, 1); { - setState(334); + setState(347); match(STRING); - setState(337); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + setState(350); + switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: { - setState(335); + setState(348); extdot(); } break; case 2: { - setState(336); + setState(349); extbrace(); } break; @@ -2621,40 +2655,40 @@ class PlanAParser extends Parser { public final ArgumentsContext arguments() throws RecognitionException { ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_arguments); + enterRule(_localctx, 46, RULE_arguments); int _la; try { 
enterOuterAlt(_localctx, 1); { { - setState(339); + setState(352); match(LP); - setState(348); + setState(361); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { - setState(340); + setState(353); expression(0); - setState(345); + setState(358); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(341); + setState(354); match(COMMA); - setState(342); + setState(355); expression(0); } } - setState(347); + setState(360); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(350); + setState(363); match(RP); } } @@ -2686,12 +2720,12 @@ class PlanAParser extends Parser { public final IncrementContext increment() throws RecognitionException { IncrementContext _localctx = new IncrementContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_increment); + enterRule(_localctx, 48, RULE_increment); int _la; try { enterOuterAlt(_localctx, 1); { - setState(352); + setState(365); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); @@ -2713,7 +2747,7 @@ class PlanAParser extends Parser { public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 9: + case 11: return expression_sempred((ExpressionContext)_localctx, predIndex); } return true; @@ -2747,144 +2781,148 @@ class PlanAParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3N\u0165\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3N\u0172\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ - "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\3\2\6\2\62"+ - "\n\2\r\2\16\2\63\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3?\n\3\3\3\3\3"+ - "\3\3\3\3\3\3\3\3\5\3G\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3P\n\3\3\3\3\3"+ - "\3\3\5\3U\n\3\3\3\3\3\5\3Y\n\3\3\3\3\3\5\3]\n\3\3\3\3\3\3\3\5\3b\n\3\3"+ - "\3\3\3\5\3f\n\3\3\3\3\3\5\3j\n\3\3\3\3\3\5\3n\n\3\3\3\3\3\3\3\5\3s\n\3"+ - "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3~\n\3\r\3\16\3\177\3\3\3\3\3\3"+ - "\5\3\u0085\n\3\3\3\3\3\5\3\u0089\n\3\5\3\u008b\n\3\3\4\3\4\7\4\u008f\n"+ - "\4\f\4\16\4\u0092\13\4\3\4\3\4\5\4\u0096\n\4\3\5\3\5\3\6\3\6\5\6\u009c"+ - "\n\6\3\7\3\7\3\b\3\b\3\b\3\b\7\b\u00a4\n\b\f\b\16\b\u00a7\13\b\3\t\3\t"+ - "\3\t\7\t\u00ac\n\t\f\t\16\t\u00af\13\t\3\n\3\n\3\n\5\n\u00b4\n\n\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\5\13"+ - "\u00d2\n\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u00f8\n\13\f\13"+ - "\16\13\u00fb\13\13\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u0103\n\f\3\r\3\r\3\r\3"+ - "\r\3\r\3\r\3\r\5\r\u010c\n\r\3\r\3\r\3\r\5\r\u0111\n\r\3\16\3\16\3\16"+ - "\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u011c\n\16\3\17\3\17\3\17\3\17\3\17"+ - 
"\5\17\u0123\n\17\3\20\3\20\3\20\5\20\u0128\n\20\3\21\3\21\3\21\3\22\3"+ - "\22\3\22\3\22\5\22\u0131\n\22\3\23\3\23\3\23\5\23\u0136\n\23\3\24\3\24"+ - "\3\24\5\24\u013b\n\24\3\25\3\25\3\25\3\25\3\25\5\25\u0142\n\25\3\25\3"+ - "\25\3\25\3\25\6\25\u0148\n\25\r\25\16\25\u0149\3\25\5\25\u014d\n\25\5"+ - "\25\u014f\n\25\3\26\3\26\3\26\5\26\u0154\n\26\3\27\3\27\3\27\3\27\7\27"+ - "\u015a\n\27\f\27\16\27\u015d\13\27\5\27\u015f\n\27\3\27\3\27\3\30\3\30"+ - "\3\30\2\3\24\31\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\2\f\4"+ - "\2\32\33\37 \3\2\65@\3\2BE\3\2\34\36\3\2\37 \3\2!#\3\2$\'\3\2(+\3\2MN"+ - "\3\2\63\64\u01a5\2\61\3\2\2\2\4\u008a\3\2\2\2\6\u0095\3\2\2\2\b\u0097"+ - "\3\2\2\2\n\u009b\3\2\2\2\f\u009d\3\2\2\2\16\u009f\3\2\2\2\20\u00a8\3\2"+ - "\2\2\22\u00b0\3\2\2\2\24\u00d1\3\2\2\2\26\u0102\3\2\2\2\30\u0104\3\2\2"+ - "\2\32\u0112\3\2\2\2\34\u011d\3\2\2\2\36\u0124\3\2\2\2 \u0129\3\2\2\2\""+ - "\u012c\3\2\2\2$\u0132\3\2\2\2&\u0137\3\2\2\2(\u013c\3\2\2\2*\u0150\3\2"+ - "\2\2,\u0155\3\2\2\2.\u0162\3\2\2\2\60\62\5\4\3\2\61\60\3\2\2\2\62\63\3"+ - "\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64\65\3\2\2\2\65\66\7\2\2\3\66\3\3"+ - "\2\2\2\678\7\16\2\289\7\t\2\29:\5\24\13\2:;\7\n\2\2;>\5\6\4\2<=\7\17\2"+ - "\2=?\5\6\4\2><\3\2\2\2>?\3\2\2\2?\u008b\3\2\2\2@A\7\20\2\2AB\7\t\2\2B"+ - "C\5\24\13\2CF\7\n\2\2DG\5\6\4\2EG\5\b\5\2FD\3\2\2\2FE\3\2\2\2G\u008b\3"+ - "\2\2\2HI\7\21\2\2IJ\5\6\4\2JK\7\20\2\2KL\7\t\2\2LM\5\24\13\2MO\7\n\2\2"+ - "NP\7\r\2\2ON\3\2\2\2OP\3\2\2\2P\u008b\3\2\2\2QR\7\22\2\2RT\7\t\2\2SU\5"+ - "\n\6\2TS\3\2\2\2TU\3\2\2\2UV\3\2\2\2VX\7\r\2\2WY\5\24\13\2XW\3\2\2\2X"+ - "Y\3\2\2\2YZ\3\2\2\2Z\\\7\r\2\2[]\5\f\7\2\\[\3\2\2\2\\]\3\2\2\2]^\3\2\2"+ - "\2^a\7\n\2\2_b\5\6\4\2`b\5\b\5\2a_\3\2\2\2a`\3\2\2\2b\u008b\3\2\2\2ce"+ - "\5\16\b\2df\7\r\2\2ed\3\2\2\2ef\3\2\2\2f\u008b\3\2\2\2gi\7\23\2\2hj\7"+ - "\r\2\2ih\3\2\2\2ij\3\2\2\2j\u008b\3\2\2\2km\7\24\2\2ln\7\r\2\2ml\3\2\2"+ - "\2mn\3\2\2\2n\u008b\3\2\2\2op\7\25\2\2pr\5\24\13\2qs\7\r\2\2rq\3\2\2\2"+ - "rs\3\2\2\2s\u008b\3\2\2\2tu\7\27\2\2u}\5\6\4\2vw\7\30\2\2wx\7\t\2\2xy"+ - "\7K\2\2yz\7L\2\2z{\3\2\2\2{|\7\n\2\2|~\5\6\4\2}v\3\2\2\2~\177\3\2\2\2"+ - "\177}\3\2\2\2\177\u0080\3\2\2\2\u0080\u008b\3\2\2\2\u0081\u0082\7\31\2"+ - "\2\u0082\u0084\5\24\13\2\u0083\u0085\7\r\2\2\u0084\u0083\3\2\2\2\u0084"+ - "\u0085\3\2\2\2\u0085\u008b\3\2\2\2\u0086\u0088\5\24\13\2\u0087\u0089\7"+ - "\r\2\2\u0088\u0087\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u008b\3\2\2\2\u008a"+ - "\67\3\2\2\2\u008a@\3\2\2\2\u008aH\3\2\2\2\u008aQ\3\2\2\2\u008ac\3\2\2"+ - "\2\u008ag\3\2\2\2\u008ak\3\2\2\2\u008ao\3\2\2\2\u008at\3\2\2\2\u008a\u0081"+ - "\3\2\2\2\u008a\u0086\3\2\2\2\u008b\5\3\2\2\2\u008c\u0090\7\5\2\2\u008d"+ - "\u008f\5\4\3\2\u008e\u008d\3\2\2\2\u008f\u0092\3\2\2\2\u0090\u008e\3\2"+ - "\2\2\u0090\u0091\3\2\2\2\u0091\u0093\3\2\2\2\u0092\u0090\3\2\2\2\u0093"+ - "\u0096\7\6\2\2\u0094\u0096\5\4\3\2\u0095\u008c\3\2\2\2\u0095\u0094\3\2"+ - "\2\2\u0096\7\3\2\2\2\u0097\u0098\7\r\2\2\u0098\t\3\2\2\2\u0099\u009c\5"+ - "\16\b\2\u009a\u009c\5\24\13\2\u009b\u0099\3\2\2\2\u009b\u009a\3\2\2\2"+ - "\u009c\13\3\2\2\2\u009d\u009e\5\24\13\2\u009e\r\3\2\2\2\u009f\u00a0\5"+ - "\20\t\2\u00a0\u00a5\5\22\n\2\u00a1\u00a2\7\f\2\2\u00a2\u00a4\5\22\n\2"+ - "\u00a3\u00a1\3\2\2\2\u00a4\u00a7\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a5\u00a6"+ - "\3\2\2\2\u00a6\17\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a8\u00ad\7K\2\2\u00a9"+ - "\u00aa\7\7\2\2\u00aa\u00ac\7\b\2\2\u00ab\u00a9\3\2\2\2\u00ac\u00af\3\2"+ - "\2\2\u00ad\u00ab\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\21\3\2\2\2\u00af\u00ad"+ - 
"\3\2\2\2\u00b0\u00b3\7L\2\2\u00b1\u00b2\7\65\2\2\u00b2\u00b4\5\24\13\2"+ - "\u00b3\u00b1\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\23\3\2\2\2\u00b5\u00b6"+ - "\b\13\1\2\u00b6\u00b7\t\2\2\2\u00b7\u00d2\5\24\13\20\u00b8\u00b9\7\t\2"+ - "\2\u00b9\u00ba\5\20\t\2\u00ba\u00bb\7\n\2\2\u00bb\u00bc\5\24\13\17\u00bc"+ - "\u00d2\3\2\2\2\u00bd\u00be\5\26\f\2\u00be\u00bf\t\3\2\2\u00bf\u00c0\5"+ - "\24\13\3\u00c0\u00d2\3\2\2\2\u00c1\u00c2\7\t\2\2\u00c2\u00c3\5\24\13\2"+ - "\u00c3\u00c4\7\n\2\2\u00c4\u00d2\3\2\2\2\u00c5\u00d2\t\4\2\2\u00c6\u00d2"+ - "\7G\2\2\u00c7\u00d2\7H\2\2\u00c8\u00d2\7I\2\2\u00c9\u00d2\7J\2\2\u00ca"+ - "\u00cb\5\26\f\2\u00cb\u00cc\5.\30\2\u00cc\u00d2\3\2\2\2\u00cd\u00ce\5"+ - ".\30\2\u00ce\u00cf\5\26\f\2\u00cf\u00d2\3\2\2\2\u00d0\u00d2\5\26\f\2\u00d1"+ - "\u00b5\3\2\2\2\u00d1\u00b8\3\2\2\2\u00d1\u00bd\3\2\2\2\u00d1\u00c1\3\2"+ - "\2\2\u00d1\u00c5\3\2\2\2\u00d1\u00c6\3\2\2\2\u00d1\u00c7\3\2\2\2\u00d1"+ - "\u00c8\3\2\2\2\u00d1\u00c9\3\2\2\2\u00d1\u00ca\3\2\2\2\u00d1\u00cd\3\2"+ - "\2\2\u00d1\u00d0\3\2\2\2\u00d2\u00f9\3\2\2\2\u00d3\u00d4\f\16\2\2\u00d4"+ - "\u00d5\t\5\2\2\u00d5\u00f8\5\24\13\17\u00d6\u00d7\f\r\2\2\u00d7\u00d8"+ - "\t\6\2\2\u00d8\u00f8\5\24\13\16\u00d9\u00da\f\f\2\2\u00da\u00db\t\7\2"+ - "\2\u00db\u00f8\5\24\13\r\u00dc\u00dd\f\13\2\2\u00dd\u00de\t\b\2\2\u00de"+ - "\u00f8\5\24\13\f\u00df\u00e0\f\n\2\2\u00e0\u00e1\t\t\2\2\u00e1\u00f8\5"+ - "\24\13\13\u00e2\u00e3\f\t\2\2\u00e3\u00e4\7,\2\2\u00e4\u00f8\5\24\13\n"+ - "\u00e5\u00e6\f\b\2\2\u00e6\u00e7\7-\2\2\u00e7\u00f8\5\24\13\t\u00e8\u00e9"+ - "\f\7\2\2\u00e9\u00ea\7.\2\2\u00ea\u00f8\5\24\13\b\u00eb\u00ec\f\6\2\2"+ - "\u00ec\u00ed\7/\2\2\u00ed\u00f8\5\24\13\7\u00ee\u00ef\f\5\2\2\u00ef\u00f0"+ - "\7\60\2\2\u00f0\u00f8\5\24\13\6\u00f1\u00f2\f\4\2\2\u00f2\u00f3\7\61\2"+ - "\2\u00f3\u00f4\5\24\13\2\u00f4\u00f5\7\62\2\2\u00f5\u00f6\5\24\13\4\u00f6"+ - "\u00f8\3\2\2\2\u00f7\u00d3\3\2\2\2\u00f7\u00d6\3\2\2\2\u00f7\u00d9\3\2"+ - "\2\2\u00f7\u00dc\3\2\2\2\u00f7\u00df\3\2\2\2\u00f7\u00e2\3\2\2\2\u00f7"+ - "\u00e5\3\2\2\2\u00f7\u00e8\3\2\2\2\u00f7\u00eb\3\2\2\2\u00f7\u00ee\3\2"+ - "\2\2\u00f7\u00f1\3\2\2\2\u00f8\u00fb\3\2\2\2\u00f9\u00f7\3\2\2\2\u00f9"+ - "\u00fa\3\2\2\2\u00fa\25\3\2\2\2\u00fb\u00f9\3\2\2\2\u00fc\u0103\5\30\r"+ - "\2\u00fd\u0103\5\32\16\2\u00fe\u0103\5 \21\2\u00ff\u0103\5$\23\2\u0100"+ - "\u0103\5(\25\2\u0101\u0103\5*\26\2\u0102\u00fc\3\2\2\2\u0102\u00fd\3\2"+ - "\2\2\u0102\u00fe\3\2\2\2\u0102\u00ff\3\2\2\2\u0102\u0100\3\2\2\2\u0102"+ - "\u0101\3\2\2\2\u0103\27\3\2\2\2\u0104\u010b\7\t\2\2\u0105\u010c\5\30\r"+ - "\2\u0106\u010c\5\32\16\2\u0107\u010c\5 \21\2\u0108\u010c\5$\23\2\u0109"+ - "\u010c\5(\25\2\u010a\u010c\5*\26\2\u010b\u0105\3\2\2\2\u010b\u0106\3\2"+ - "\2\2\u010b\u0107\3\2\2\2\u010b\u0108\3\2\2\2\u010b\u0109\3\2\2\2\u010b"+ - "\u010a\3\2\2\2\u010c\u010d\3\2\2\2\u010d\u0110\7\n\2\2\u010e\u0111\5\36"+ - "\20\2\u010f\u0111\5\34\17\2\u0110\u010e\3\2\2\2\u0110\u010f\3\2\2\2\u0110"+ - "\u0111\3\2\2\2\u0111\31\3\2\2\2\u0112\u0113\7\t\2\2\u0113\u0114\5\20\t"+ - "\2\u0114\u011b\7\n\2\2\u0115\u011c\5\30\r\2\u0116\u011c\5\32\16\2\u0117"+ - "\u011c\5 \21\2\u0118\u011c\5$\23\2\u0119\u011c\5(\25\2\u011a\u011c\5*"+ - "\26\2\u011b\u0115\3\2\2\2\u011b\u0116\3\2\2\2\u011b\u0117\3\2\2\2\u011b"+ - "\u0118\3\2\2\2\u011b\u0119\3\2\2\2\u011b\u011a\3\2\2\2\u011c\33\3\2\2"+ - "\2\u011d\u011e\7\7\2\2\u011e\u011f\5\24\13\2\u011f\u0122\7\b\2\2\u0120"+ - "\u0123\5\36\20\2\u0121\u0123\5\34\17\2\u0122\u0120\3\2\2\2\u0122\u0121"+ - "\3\2\2\2\u0122\u0123\3\2\2\2\u0123\35\3\2\2\2\u0124\u0127\7\13\2\2\u0125"+ - 
"\u0128\5\"\22\2\u0126\u0128\5&\24\2\u0127\u0125\3\2\2\2\u0127\u0126\3"+ - "\2\2\2\u0128\37\3\2\2\2\u0129\u012a\7K\2\2\u012a\u012b\5\36\20\2\u012b"+ - "!\3\2\2\2\u012c\u012d\7N\2\2\u012d\u0130\5,\27\2\u012e\u0131\5\36\20\2"+ - "\u012f\u0131\5\34\17\2\u0130\u012e\3\2\2\2\u0130\u012f\3\2\2\2\u0130\u0131"+ - "\3\2\2\2\u0131#\3\2\2\2\u0132\u0135\7L\2\2\u0133\u0136\5\36\20\2\u0134"+ - "\u0136\5\34\17\2\u0135\u0133\3\2\2\2\u0135\u0134\3\2\2\2\u0135\u0136\3"+ - "\2\2\2\u0136%\3\2\2\2\u0137\u013a\t\n\2\2\u0138\u013b\5\36\20\2\u0139"+ - "\u013b\5\34\17\2\u013a\u0138\3\2\2\2\u013a\u0139\3\2\2\2\u013a\u013b\3"+ - "\2\2\2\u013b\'\3\2\2\2\u013c\u013d\7\26\2\2\u013d\u014e\7K\2\2\u013e\u0141"+ - "\5,\27\2\u013f\u0142\5\36\20\2\u0140\u0142\5\34\17\2\u0141\u013f\3\2\2"+ - "\2\u0141\u0140\3\2\2\2\u0141\u0142\3\2\2\2\u0142\u014f\3\2\2\2\u0143\u0144"+ - "\7\7\2\2\u0144\u0145\5\24\13\2\u0145\u0146\7\b\2\2\u0146\u0148\3\2\2\2"+ - "\u0147\u0143\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u0147\3\2\2\2\u0149\u014a"+ - "\3\2\2\2\u014a\u014c\3\2\2\2\u014b\u014d\5\36\20\2\u014c\u014b\3\2\2\2"+ - "\u014c\u014d\3\2\2\2\u014d\u014f\3\2\2\2\u014e\u013e\3\2\2\2\u014e\u0147"+ - "\3\2\2\2\u014f)\3\2\2\2\u0150\u0153\7F\2\2\u0151\u0154\5\36\20\2\u0152"+ - "\u0154\5\34\17\2\u0153\u0151\3\2\2\2\u0153\u0152\3\2\2\2\u0153\u0154\3"+ - "\2\2\2\u0154+\3\2\2\2\u0155\u015e\7\t\2\2\u0156\u015b\5\24\13\2\u0157"+ - "\u0158\7\f\2\2\u0158\u015a\5\24\13\2\u0159\u0157\3\2\2\2\u015a\u015d\3"+ - "\2\2\2\u015b\u0159\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u015f\3\2\2\2\u015d"+ - "\u015b\3\2\2\2\u015e\u0156\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0160\3\2"+ - "\2\2\u0160\u0161\7\n\2\2\u0161-\3\2\2\2\u0162\u0163\t\13\2\2\u0163/\3"+ - "\2\2\2+\63>FOTX\\aeimr\177\u0084\u0088\u008a\u0090\u0095\u009b\u00a5\u00ad"+ - "\u00b3\u00d1\u00f7\u00f9\u0102\u010b\u0110\u011b\u0122\u0127\u0130\u0135"+ - "\u013a\u0141\u0149\u014c\u014e\u0153\u015b\u015e"; + "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ + "\4\32\t\32\3\2\6\2\66\n\2\r\2\16\2\67\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3"+ + "\3\3\5\3C\n\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3K\n\3\3\3\3\3\3\3\3\3\3\3\3\3"+ + "\3\3\5\3T\n\3\3\3\3\3\3\3\5\3Y\n\3\3\3\3\3\5\3]\n\3\3\3\3\3\5\3a\n\3\3"+ + "\3\3\3\3\3\5\3f\n\3\3\3\3\3\5\3j\n\3\3\3\3\3\5\3n\n\3\3\3\3\3\5\3r\n\3"+ + "\3\3\3\3\3\3\5\3w\n\3\3\3\3\3\3\3\6\3|\n\3\r\3\16\3}\3\3\3\3\3\3\5\3\u0083"+ + "\n\3\3\3\3\3\5\3\u0087\n\3\5\3\u0089\n\3\3\4\3\4\6\4\u008d\n\4\r\4\16"+ + "\4\u008e\3\4\3\4\3\4\5\4\u0094\n\4\3\5\3\5\5\5\u0098\n\5\3\6\3\6\3\6\3"+ + "\7\3\7\5\7\u009f\n\7\3\b\3\b\3\t\3\t\3\t\3\t\7\t\u00a7\n\t\f\t\16\t\u00aa"+ + "\13\t\3\n\3\n\3\n\7\n\u00af\n\n\f\n\16\n\u00b2\13\n\3\13\3\13\3\13\5\13"+ + "\u00b7\n\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u00c1\n\f\3\r\3\r\3\r"+ + "\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ + "\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u00df\n\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ + "\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r"+ + "\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\7\r\u0105\n\r\f\r\16"+ + "\r\u0108\13\r\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u0110\n\16\3\17\3\17"+ + "\3\17\3\17\3\17\3\17\3\17\5\17\u0119\n\17\3\17\3\17\3\17\5\17\u011e\n"+ + "\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\5\20\u0129\n\20\3\21"+ + "\3\21\3\21\3\21\3\21\5\21\u0130\n\21\3\22\3\22\3\22\5\22\u0135\n\22\3"+ + "\23\3\23\3\23\3\24\3\24\3\24\3\24\5\24\u013e\n\24\3\25\3\25\3\25\5\25"+ + "\u0143\n\25\3\26\3\26\3\26\5\26\u0148\n\26\3\27\3\27\3\27\3\27\3\27\5"+ + 
"\27\u014f\n\27\3\27\3\27\3\27\3\27\6\27\u0155\n\27\r\27\16\27\u0156\3"+ + "\27\5\27\u015a\n\27\5\27\u015c\n\27\3\30\3\30\3\30\5\30\u0161\n\30\3\31"+ + "\3\31\3\31\3\31\7\31\u0167\n\31\f\31\16\31\u016a\13\31\5\31\u016c\n\31"+ + "\3\31\3\31\3\32\3\32\3\32\2\3\30\33\2\4\6\b\n\f\16\20\22\24\26\30\32\34"+ + "\36 \"$&(*,.\60\62\2\f\4\2\32\33\37 \3\2\65@\3\2BE\3\2\34\36\3\2\37 \3"+ + "\2!#\3\2$\'\3\2(+\3\2MN\3\2\63\64\u01b2\2\65\3\2\2\2\4\u0088\3\2\2\2\6"+ + "\u0093\3\2\2\2\b\u0097\3\2\2\2\n\u0099\3\2\2\2\f\u009e\3\2\2\2\16\u00a0"+ + "\3\2\2\2\20\u00a2\3\2\2\2\22\u00ab\3\2\2\2\24\u00b3\3\2\2\2\26\u00b8\3"+ + "\2\2\2\30\u00de\3\2\2\2\32\u010f\3\2\2\2\34\u0111\3\2\2\2\36\u011f\3\2"+ + "\2\2 \u012a\3\2\2\2\"\u0131\3\2\2\2$\u0136\3\2\2\2&\u0139\3\2\2\2(\u013f"+ + "\3\2\2\2*\u0144\3\2\2\2,\u0149\3\2\2\2.\u015d\3\2\2\2\60\u0162\3\2\2\2"+ + "\62\u016f\3\2\2\2\64\66\5\4\3\2\65\64\3\2\2\2\66\67\3\2\2\2\67\65\3\2"+ + "\2\2\678\3\2\2\289\3\2\2\29:\7\2\2\3:\3\3\2\2\2;<\7\16\2\2<=\7\t\2\2="+ + ">\5\30\r\2>?\7\n\2\2?B\5\6\4\2@A\7\17\2\2AC\5\6\4\2B@\3\2\2\2BC\3\2\2"+ + "\2C\u0089\3\2\2\2DE\7\20\2\2EF\7\t\2\2FG\5\30\r\2GJ\7\n\2\2HK\5\6\4\2"+ + "IK\5\b\5\2JH\3\2\2\2JI\3\2\2\2K\u0089\3\2\2\2LM\7\21\2\2MN\5\6\4\2NO\7"+ + "\20\2\2OP\7\t\2\2PQ\5\30\r\2QS\7\n\2\2RT\7\r\2\2SR\3\2\2\2ST\3\2\2\2T"+ + "\u0089\3\2\2\2UV\7\22\2\2VX\7\t\2\2WY\5\f\7\2XW\3\2\2\2XY\3\2\2\2YZ\3"+ + "\2\2\2Z\\\7\r\2\2[]\5\30\r\2\\[\3\2\2\2\\]\3\2\2\2]^\3\2\2\2^`\7\r\2\2"+ + "_a\5\16\b\2`_\3\2\2\2`a\3\2\2\2ab\3\2\2\2be\7\n\2\2cf\5\6\4\2df\5\b\5"+ + "\2ec\3\2\2\2ed\3\2\2\2f\u0089\3\2\2\2gi\5\20\t\2hj\7\r\2\2ih\3\2\2\2i"+ + "j\3\2\2\2j\u0089\3\2\2\2km\7\23\2\2ln\7\r\2\2ml\3\2\2\2mn\3\2\2\2n\u0089"+ + "\3\2\2\2oq\7\24\2\2pr\7\r\2\2qp\3\2\2\2qr\3\2\2\2r\u0089\3\2\2\2st\7\25"+ + "\2\2tv\5\30\r\2uw\7\r\2\2vu\3\2\2\2vw\3\2\2\2w\u0089\3\2\2\2xy\7\27\2"+ + "\2y{\5\6\4\2z|\5\26\f\2{z\3\2\2\2|}\3\2\2\2}{\3\2\2\2}~\3\2\2\2~\u0089"+ + "\3\2\2\2\177\u0080\7\31\2\2\u0080\u0082\5\30\r\2\u0081\u0083\7\r\2\2\u0082"+ + "\u0081\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0089\3\2\2\2\u0084\u0086\5\30"+ + "\r\2\u0085\u0087\7\r\2\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087"+ + "\u0089\3\2\2\2\u0088;\3\2\2\2\u0088D\3\2\2\2\u0088L\3\2\2\2\u0088U\3\2"+ + "\2\2\u0088g\3\2\2\2\u0088k\3\2\2\2\u0088o\3\2\2\2\u0088s\3\2\2\2\u0088"+ + "x\3\2\2\2\u0088\177\3\2\2\2\u0088\u0084\3\2\2\2\u0089\5\3\2\2\2\u008a"+ + "\u008c\7\5\2\2\u008b\u008d\5\4\3\2\u008c\u008b\3\2\2\2\u008d\u008e\3\2"+ + "\2\2\u008e\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090"+ + "\u0091\7\6\2\2\u0091\u0094\3\2\2\2\u0092\u0094\5\4\3\2\u0093\u008a\3\2"+ + "\2\2\u0093\u0092\3\2\2\2\u0094\7\3\2\2\2\u0095\u0098\5\n\6\2\u0096\u0098"+ + "\7\r\2\2\u0097\u0095\3\2\2\2\u0097\u0096\3\2\2\2\u0098\t\3\2\2\2\u0099"+ + "\u009a\7\5\2\2\u009a\u009b\7\6\2\2\u009b\13\3\2\2\2\u009c\u009f\5\20\t"+ + "\2\u009d\u009f\5\30\r\2\u009e\u009c\3\2\2\2\u009e\u009d\3\2\2\2\u009f"+ + "\r\3\2\2\2\u00a0\u00a1\5\30\r\2\u00a1\17\3\2\2\2\u00a2\u00a3\5\22\n\2"+ + "\u00a3\u00a8\5\24\13\2\u00a4\u00a5\7\f\2\2\u00a5\u00a7\5\24\13\2\u00a6"+ + "\u00a4\3\2\2\2\u00a7\u00aa\3\2\2\2\u00a8\u00a6\3\2\2\2\u00a8\u00a9\3\2"+ + "\2\2\u00a9\21\3\2\2\2\u00aa\u00a8\3\2\2\2\u00ab\u00b0\7K\2\2\u00ac\u00ad"+ + "\7\7\2\2\u00ad\u00af\7\b\2\2\u00ae\u00ac\3\2\2\2\u00af\u00b2\3\2\2\2\u00b0"+ + "\u00ae\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\23\3\2\2\2\u00b2\u00b0\3\2\2"+ + "\2\u00b3\u00b6\7L\2\2\u00b4\u00b5\7\65\2\2\u00b5\u00b7\5\30\r\2\u00b6"+ + "\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\25\3\2\2\2\u00b8\u00b9\7\30\2"+ + 
"\2\u00b9\u00ba\7\t\2\2\u00ba\u00bb\7K\2\2\u00bb\u00bc\7L\2\2\u00bc\u00bd"+ + "\3\2\2\2\u00bd\u00c0\7\n\2\2\u00be\u00c1\5\6\4\2\u00bf\u00c1\5\n\6\2\u00c0"+ + "\u00be\3\2\2\2\u00c0\u00bf\3\2\2\2\u00c1\27\3\2\2\2\u00c2\u00c3\b\r\1"+ + "\2\u00c3\u00c4\t\2\2\2\u00c4\u00df\5\30\r\20\u00c5\u00c6\7\t\2\2\u00c6"+ + "\u00c7\5\22\n\2\u00c7\u00c8\7\n\2\2\u00c8\u00c9\5\30\r\17\u00c9\u00df"+ + "\3\2\2\2\u00ca\u00cb\5\32\16\2\u00cb\u00cc\t\3\2\2\u00cc\u00cd\5\30\r"+ + "\3\u00cd\u00df\3\2\2\2\u00ce\u00cf\7\t\2\2\u00cf\u00d0\5\30\r\2\u00d0"+ + "\u00d1\7\n\2\2\u00d1\u00df\3\2\2\2\u00d2\u00df\t\4\2\2\u00d3\u00df\7G"+ + "\2\2\u00d4\u00df\7H\2\2\u00d5\u00df\7I\2\2\u00d6\u00df\7J\2\2\u00d7\u00d8"+ + "\5\32\16\2\u00d8\u00d9\5\62\32\2\u00d9\u00df\3\2\2\2\u00da\u00db\5\62"+ + "\32\2\u00db\u00dc\5\32\16\2\u00dc\u00df\3\2\2\2\u00dd\u00df\5\32\16\2"+ + "\u00de\u00c2\3\2\2\2\u00de\u00c5\3\2\2\2\u00de\u00ca\3\2\2\2\u00de\u00ce"+ + "\3\2\2\2\u00de\u00d2\3\2\2\2\u00de\u00d3\3\2\2\2\u00de\u00d4\3\2\2\2\u00de"+ + "\u00d5\3\2\2\2\u00de\u00d6\3\2\2\2\u00de\u00d7\3\2\2\2\u00de\u00da\3\2"+ + "\2\2\u00de\u00dd\3\2\2\2\u00df\u0106\3\2\2\2\u00e0\u00e1\f\16\2\2\u00e1"+ + "\u00e2\t\5\2\2\u00e2\u0105\5\30\r\17\u00e3\u00e4\f\r\2\2\u00e4\u00e5\t"+ + "\6\2\2\u00e5\u0105\5\30\r\16\u00e6\u00e7\f\f\2\2\u00e7\u00e8\t\7\2\2\u00e8"+ + "\u0105\5\30\r\r\u00e9\u00ea\f\13\2\2\u00ea\u00eb\t\b\2\2\u00eb\u0105\5"+ + "\30\r\f\u00ec\u00ed\f\n\2\2\u00ed\u00ee\t\t\2\2\u00ee\u0105\5\30\r\13"+ + "\u00ef\u00f0\f\t\2\2\u00f0\u00f1\7,\2\2\u00f1\u0105\5\30\r\n\u00f2\u00f3"+ + "\f\b\2\2\u00f3\u00f4\7-\2\2\u00f4\u0105\5\30\r\t\u00f5\u00f6\f\7\2\2\u00f6"+ + "\u00f7\7.\2\2\u00f7\u0105\5\30\r\b\u00f8\u00f9\f\6\2\2\u00f9\u00fa\7/"+ + "\2\2\u00fa\u0105\5\30\r\7\u00fb\u00fc\f\5\2\2\u00fc\u00fd\7\60\2\2\u00fd"+ + "\u0105\5\30\r\6\u00fe\u00ff\f\4\2\2\u00ff\u0100\7\61\2\2\u0100\u0101\5"+ + "\30\r\2\u0101\u0102\7\62\2\2\u0102\u0103\5\30\r\4\u0103\u0105\3\2\2\2"+ + "\u0104\u00e0\3\2\2\2\u0104\u00e3\3\2\2\2\u0104\u00e6\3\2\2\2\u0104\u00e9"+ + "\3\2\2\2\u0104\u00ec\3\2\2\2\u0104\u00ef\3\2\2\2\u0104\u00f2\3\2\2\2\u0104"+ + "\u00f5\3\2\2\2\u0104\u00f8\3\2\2\2\u0104\u00fb\3\2\2\2\u0104\u00fe\3\2"+ + "\2\2\u0105\u0108\3\2\2\2\u0106\u0104\3\2\2\2\u0106\u0107\3\2\2\2\u0107"+ + "\31\3\2\2\2\u0108\u0106\3\2\2\2\u0109\u0110\5\34\17\2\u010a\u0110\5\36"+ + "\20\2\u010b\u0110\5$\23\2\u010c\u0110\5(\25\2\u010d\u0110\5,\27\2\u010e"+ + "\u0110\5.\30\2\u010f\u0109\3\2\2\2\u010f\u010a\3\2\2\2\u010f\u010b\3\2"+ + "\2\2\u010f\u010c\3\2\2\2\u010f\u010d\3\2\2\2\u010f\u010e\3\2\2\2\u0110"+ + "\33\3\2\2\2\u0111\u0118\7\t\2\2\u0112\u0119\5\34\17\2\u0113\u0119\5\36"+ + "\20\2\u0114\u0119\5$\23\2\u0115\u0119\5(\25\2\u0116\u0119\5,\27\2\u0117"+ + "\u0119\5.\30\2\u0118\u0112\3\2\2\2\u0118\u0113\3\2\2\2\u0118\u0114\3\2"+ + "\2\2\u0118\u0115\3\2\2\2\u0118\u0116\3\2\2\2\u0118\u0117\3\2\2\2\u0119"+ + "\u011a\3\2\2\2\u011a\u011d\7\n\2\2\u011b\u011e\5\"\22\2\u011c\u011e\5"+ + " \21\2\u011d\u011b\3\2\2\2\u011d\u011c\3\2\2\2\u011d\u011e\3\2\2\2\u011e"+ + "\35\3\2\2\2\u011f\u0120\7\t\2\2\u0120\u0121\5\22\n\2\u0121\u0128\7\n\2"+ + "\2\u0122\u0129\5\34\17\2\u0123\u0129\5\36\20\2\u0124\u0129\5$\23\2\u0125"+ + "\u0129\5(\25\2\u0126\u0129\5,\27\2\u0127\u0129\5.\30\2\u0128\u0122\3\2"+ + "\2\2\u0128\u0123\3\2\2\2\u0128\u0124\3\2\2\2\u0128\u0125\3\2\2\2\u0128"+ + "\u0126\3\2\2\2\u0128\u0127\3\2\2\2\u0129\37\3\2\2\2\u012a\u012b\7\7\2"+ + "\2\u012b\u012c\5\30\r\2\u012c\u012f\7\b\2\2\u012d\u0130\5\"\22\2\u012e"+ + "\u0130\5 \21\2\u012f\u012d\3\2\2\2\u012f\u012e\3\2\2\2\u012f\u0130\3\2"+ + 
"\2\2\u0130!\3\2\2\2\u0131\u0134\7\13\2\2\u0132\u0135\5&\24\2\u0133\u0135"+ + "\5*\26\2\u0134\u0132\3\2\2\2\u0134\u0133\3\2\2\2\u0135#\3\2\2\2\u0136"+ + "\u0137\7K\2\2\u0137\u0138\5\"\22\2\u0138%\3\2\2\2\u0139\u013a\7N\2\2\u013a"+ + "\u013d\5\60\31\2\u013b\u013e\5\"\22\2\u013c\u013e\5 \21\2\u013d\u013b"+ + "\3\2\2\2\u013d\u013c\3\2\2\2\u013d\u013e\3\2\2\2\u013e\'\3\2\2\2\u013f"+ + "\u0142\7L\2\2\u0140\u0143\5\"\22\2\u0141\u0143\5 \21\2\u0142\u0140\3\2"+ + "\2\2\u0142\u0141\3\2\2\2\u0142\u0143\3\2\2\2\u0143)\3\2\2\2\u0144\u0147"+ + "\t\n\2\2\u0145\u0148\5\"\22\2\u0146\u0148\5 \21\2\u0147\u0145\3\2\2\2"+ + "\u0147\u0146\3\2\2\2\u0147\u0148\3\2\2\2\u0148+\3\2\2\2\u0149\u014a\7"+ + "\26\2\2\u014a\u015b\7K\2\2\u014b\u014e\5\60\31\2\u014c\u014f\5\"\22\2"+ + "\u014d\u014f\5 \21\2\u014e\u014c\3\2\2\2\u014e\u014d\3\2\2\2\u014e\u014f"+ + "\3\2\2\2\u014f\u015c\3\2\2\2\u0150\u0151\7\7\2\2\u0151\u0152\5\30\r\2"+ + "\u0152\u0153\7\b\2\2\u0153\u0155\3\2\2\2\u0154\u0150\3\2\2\2\u0155\u0156"+ + "\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0157\3\2\2\2\u0157\u0159\3\2\2\2\u0158"+ + "\u015a\5\"\22\2\u0159\u0158\3\2\2\2\u0159\u015a\3\2\2\2\u015a\u015c\3"+ + "\2\2\2\u015b\u014b\3\2\2\2\u015b\u0154\3\2\2\2\u015c-\3\2\2\2\u015d\u0160"+ + "\7F\2\2\u015e\u0161\5\"\22\2\u015f\u0161\5 \21\2\u0160\u015e\3\2\2\2\u0160"+ + "\u015f\3\2\2\2\u0160\u0161\3\2\2\2\u0161/\3\2\2\2\u0162\u016b\7\t\2\2"+ + "\u0163\u0168\5\30\r\2\u0164\u0165\7\f\2\2\u0165\u0167\5\30\r\2\u0166\u0164"+ + "\3\2\2\2\u0167\u016a\3\2\2\2\u0168\u0166\3\2\2\2\u0168\u0169\3\2\2\2\u0169"+ + "\u016c\3\2\2\2\u016a\u0168\3\2\2\2\u016b\u0163\3\2\2\2\u016b\u016c\3\2"+ + "\2\2\u016c\u016d\3\2\2\2\u016d\u016e\7\n\2\2\u016e\61\3\2\2\2\u016f\u0170"+ + "\t\13\2\2\u0170\63\3\2\2\2-\67BJSX\\`eimqv}\u0082\u0086\u0088\u008e\u0093"+ + "\u0097\u009e\u00a8\u00b0\u00b6\u00c0\u00de\u0104\u0106\u010f\u0118\u011d"+ + "\u0128\u012f\u0134\u013d\u0142\u0147\u014e\u0156\u0159\u015b\u0160\u0168"+ + "\u016b"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java index d731b57676b..7997b57ae6b 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java @@ -116,6 +116,13 @@ class PlanAParserBaseVisitor extends AbstractParseTreeVisitor implements P * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitEmpty(PlanAParser.EmptyContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitEmptyscope(PlanAParser.EmptyscopeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -151,6 +158,13 @@ class PlanAParserBaseVisitor extends AbstractParseTreeVisitor implements P * {@link #visitChildren} on {@code ctx}.</p>
    */ @Override public T visitDeclvar(PlanAParser.DeclvarContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + * <p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitTrap(PlanAParser.TrapContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java index 7470f3b6ad5..326a62555e7 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java @@ -113,6 +113,12 @@ interface PlanAParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitEmpty(PlanAParser.EmptyContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#emptyscope}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEmptyscope(PlanAParser.EmptyscopeContext ctx); /** * Visit a parse tree produced by {@link PlanAParser#initializer}. * @param ctx the parse tree @@ -143,6 +149,12 @@ interface PlanAParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitDeclvar(PlanAParser.DeclvarContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#trap}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrap(PlanAParser.TrapContext ctx); /** * Visit a parse tree produced by the {@code comp} * labeled alternative in {@link PlanAParser#expression}. diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java index c893cd38324..d87c9a9e1c4 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java @@ -34,7 +34,7 @@ public final class PlanAPlugin extends Plugin { return "Plan A scripting language for Elasticsearch"; } - public void onModule(ScriptModule module) { + public void onModule(final ScriptModule module) { module.addScriptEngine(PlanAScriptEngineService.class); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java index 69736f311e6..858bf21021c 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java @@ -40,117 +40,199 @@ import java.security.ProtectionDomain; import java.util.HashMap; import java.util.Map; +/** + * Implementation of a ScriptEngine for the Plan A language. + */ public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService { + /** + * Standard name of the Plan A language. + */ public static final String NAME = "plan-a"; - // default settings, used unless otherwise specified + + /** + * Default compiler settings to be used. + */ private static final CompilerSettings DEFAULT_COMPILER_SETTINGS = new CompilerSettings(); - public static final String NUMERIC_OVERFLOW = "numeric_overflow"; + /** + * Permissions context used during compilation. + */ + private static final AccessControlContext COMPILATION_CONTEXT; - // TODO: how should custom definitions be specified? + /** + * Setup the allowed permissions. 
+ */ + static { + final Permissions none = new Permissions(); + none.setReadOnly(); + COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { + new ProtectionDomain(null, none) + }); + } + + /** + * Used only for testing. + */ private Definition definition = null; + /** + * Used only for testing. + */ + void setDefinition(final Definition definition) { + this.definition = definition; + } + + /** + * Constructor. + * @param settings The settings to initialize the engine with. + */ @Inject - public PlanAScriptEngineService(Settings settings) { + public PlanAScriptEngineService(final Settings settings) { super(settings); } - public void setDefinition(final Definition definition) { - this.definition = new Definition(definition); - } - + /** + * Get the type name(s) for the language. + * @return Always contains only the single name of the language. + */ @Override public String[] types() { return new String[] { NAME }; } + /** + * Get the extension(s) for the language. + * @return Always contains only the single extension of the language. + */ @Override public String[] extensions() { return new String[] { NAME }; } + /** + * Whether or not the engine is secure. + * @return Always true as the engine should be secure at runtime. + */ @Override public boolean sandboxed() { return true; } - // context used during compilation - private static final AccessControlContext COMPILATION_CONTEXT; - static { - Permissions none = new Permissions(); - none.setReadOnly(); - COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { - new ProtectionDomain(null, none) - }); - } - + /** + * Compiles a Plan A script with the specified parameters. + * @param script The code to be compiled. + * @param params The params used to modify the compiler settings on a per script basis. + * @return Compiled script object represented by an {@link Executable}. + */ @Override - public Object compile(String script, Map params) { + public Object compile(final String script, final Map params) { final CompilerSettings compilerSettings; + if (params.isEmpty()) { + // Use the default settings. compilerSettings = DEFAULT_COMPILER_SETTINGS; } else { - // custom settings + // Use custom settings specified by params. compilerSettings = new CompilerSettings(); - Map clone = new HashMap<>(params); - String value = clone.remove(NUMERIC_OVERFLOW); + Map copy = new HashMap<>(params); + String value = copy.remove(CompilerSettings.NUMERIC_OVERFLOW); + if (value != null) { - // TODO: can we get a real boolean parser in here? compilerSettings.setNumericOverflow(Boolean.parseBoolean(value)); } - if (!clone.isEmpty()) { - throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + clone); + + value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); + + if (value != null) { + compilerSettings.setMaxLoopCounter(Integer.parseInt(value)); + } + + if (!copy.isEmpty()) { + throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy); } } - // check we ourselves are not being called by unprivileged code - SecurityManager sm = System.getSecurityManager(); + + // Check we ourselves are not being called by unprivileged code. + final SecurityManager sm = System.getSecurityManager(); + if (sm != null) { sm.checkPermission(new SpecialPermission()); } - // create our loader (which loads compiled code with no permissions) - Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction() { + + // Create our loader (which loads compiled code with no permissions). 
+ final Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction() { @Override public Compiler.Loader run() { return new Compiler.Loader(getClass().getClassLoader()); } }); - // drop all permissions to actually compile the code itself + + // Drop all permissions to actually compile the code itself. return AccessController.doPrivileged(new PrivilegedAction() { @Override public Executable run() { - return Compiler.compile(loader, "something", script, definition, compilerSettings); + return Compiler.compile(loader, "unknown", script, definition, compilerSettings); } }, COMPILATION_CONTEXT); } + /** + * Retrieve an {@link ExecutableScript} for later use. + * @param compiledScript A previously compiled script. + * @param vars The variables to be used in the script. + * @return An {@link ExecutableScript} with the currently specified variables. + */ @Override - public ExecutableScript executable(CompiledScript compiledScript, Map vars) { - return new ScriptImpl((Executable) compiledScript.compiled(), vars, null); + public ExecutableScript executable(final CompiledScript compiledScript, final Map vars) { + return new ScriptImpl((Executable)compiledScript.compiled(), vars, null); } + /** + * Retrieve a {@link SearchScript} for later use. + * @param compiledScript A previously compiled script. + * @param lookup The object that ultimately allows access to search fields. + * @param vars The variables to be used in the script. + * @return An {@link SearchScript} with the currently specified variables. + */ @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { + public SearchScript search(final CompiledScript compiledScript, final SearchLookup lookup, final Map vars) { return new SearchScript() { + /** + * Get the search script that will have access to search field values. + * @param context The LeafReaderContext to be used. + * @return A script that will have the search fields from the current context available for use. + */ @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - return new ScriptImpl((Executable) compiledScript.compiled(), vars, lookup.getLeafSearchLookup(context)); + public LeafSearchScript getLeafSearchScript(final LeafReaderContext context) throws IOException { + return new ScriptImpl((Executable)compiledScript.compiled(), vars, lookup.getLeafSearchLookup(context)); } + /** + * Whether or not the score is needed. + * @return Always true as it's assumed score is needed. + */ @Override public boolean needsScores() { - return true; // TODO: maybe even do these different and more like expressions. + return true; } }; } + /** + * Action taken when a script is removed from the cache. + * @param script The removed script. + */ @Override - public void scriptRemoved(CompiledScript script) { - // nothing to do + public void scriptRemoved(final CompiledScript script) { + // Nothing to do. } + /** + * Action taken when the engine is closed. + */ @Override public void close() throws IOException { - // nothing to do + // Nothing to do. 
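For reference, a minimal standalone sketch of the compile-time parameter handling that compile() now performs: known keys are consumed, numeric_overflow is parsed as a boolean, the new loop limit as an integer, and anything left over is rejected. The literal key string "max_loop_counter" and the CompileParamsSketch class are illustrative assumptions; the diff only names the CompilerSettings.MAX_LOOP_COUNTER constant, not its value.

    import java.util.HashMap;
    import java.util.Map;

    final class CompileParamsSketch {
        static final String NUMERIC_OVERFLOW = "numeric_overflow"; // grounded in the removed engine constant
        static final String MAX_LOOP_COUNTER = "max_loop_counter"; // assumed key string

        // Consume known keys, reject anything left over, as compile() does.
        static void apply(final Map<String, String> params) {
            final Map<String, String> copy = new HashMap<>(params);
            final String overflow = copy.remove(NUMERIC_OVERFLOW);
            if (overflow != null) {
                final boolean numericOverflow = Boolean.parseBoolean(overflow);
                System.out.println("numeric_overflow = " + numericOverflow);
            }
            final String loops = copy.remove(MAX_LOOP_COUNTER);
            if (loops != null) {
                final int maxLoopCounter = Integer.parseInt(loops);
                System.out.println("max_loop_counter = " + maxLoopCounter);
            }
            if (!copy.isEmpty()) {
                throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
            }
        }

        public static void main(String[] args) {
            final Map<String, String> params = new HashMap<>();
            params.put(NUMERIC_OVERFLOW, "false");
            params.put(MAX_LOOP_COUNTER, "10000");
            apply(params); // any third key here would trigger the IllegalArgumentException
        }
    }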
} } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java index 3910cdc96f7..33fdce36d47 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java @@ -28,67 +28,128 @@ import org.elasticsearch.search.lookup.LeafSearchLookup; import java.util.HashMap; import java.util.Map; +/** + * ScriptImpl can be used as either an {@link ExecutableScript} or a {@link LeafSearchScript} + * to run a previously compiled Plan A script. + */ final class ScriptImpl implements ExecutableScript, LeafSearchScript { - final Executable executable; - final Map variables; - final LeafSearchLookup lookup; - - ScriptImpl(Executable executable, Map vars, LeafSearchLookup lookup) { + /** + * The Plan A Executable script that can be run. + */ + private final Executable executable; + + /** + * A map that can be used to access input parameters at run-time. + */ + private final Map variables; + + /** + * The lookup is used to access search field values at run-time. + */ + private final LeafSearchLookup lookup; + + /** + * Creates a ScriptImpl for the a previously compiled Plan A script. + * @param executable The previously compiled Plan A script. + * @param vars The initial variables to run the script with. + * @param lookup The lookup to allow search fields to be available if this is run as a search script. + */ + ScriptImpl(final Executable executable, final Map vars, final LeafSearchLookup lookup) { this.executable = executable; this.lookup = lookup; this.variables = new HashMap<>(); + if (vars != null) { variables.putAll(vars); } + if (lookup != null) { variables.putAll(lookup.asMap()); } } - + + /** + * Set a variable for the script to be run against. + * @param name The variable name. + * @param value The variable value. + */ @Override - public void setNextVar(String name, Object value) { + public void setNextVar(final String name, final Object value) { variables.put(name, value); } - + + /** + * Run the script. + * @return The script result. + */ @Override public Object run() { return executable.execute(variables); } - - @Override - public float runAsFloat() { - return ((Number) run()).floatValue(); - } - - @Override - public long runAsLong() { - return ((Number) run()).longValue(); - } + /** + * Run the script. + * @return The script result as a double. + */ @Override public double runAsDouble() { - return ((Number) run()).doubleValue(); + return ((Number)run()).doubleValue(); } - + + /** + * Run the script. + * @return The script result as a float. + */ @Override - public Object unwrap(Object value) { + public float runAsFloat() { + return ((Number)run()).floatValue(); + } + + /** + * Run the script. + * @return The script result as a long. + */ + @Override + public long runAsLong() { + return ((Number)run()).longValue(); + } + + /** + * This method has no effect in Plan A. + * @param value The value to unwrap. + * @return The value passed in. + */ + @Override + public Object unwrap(final Object value) { return value; } + /** + * Sets the scorer to be accessible within a script. + * @param scorer The scorer used for a search. + */ @Override - public void setScorer(Scorer scorer) { - variables.put("_score", new ScoreAccessor(scorer)); + public void setScorer(final Scorer scorer) { + variables.put("#score", new ScoreAccessor(scorer)); } + /** + * Sets the current document. 
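A minimal sketch of how the ScriptImpl variable plumbing above behaves at run-time: values set through setNextVar land in the map the compiled script reads, run() hands that map to the Executable, and the runAs* helpers coerce the result through Number. The lambda-based Executable stand-in is assumed for illustration only; the real Executable is the compiled Plan A script.

    import java.util.HashMap;
    import java.util.Map;

    final class ScriptImplSketch {
        interface Executable { Object execute(Map<String, Object> variables); }

        public static void main(String[] args) {
            // A compiled script would read its inputs from the variables map.
            final Executable executable = variables -> ((Number) variables.get("x")).intValue() * 2;

            final Map<String, Object> variables = new HashMap<>();
            variables.put("x", 21);                            // setNextVar("x", 21)
            final Object result = executable.execute(variables);    // run()
            final double asDouble = ((Number) result).doubleValue(); // runAsDouble()
            System.out.println(result + " / " + asDouble);           // prints: 42 / 42.0
        }
    }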
+ * @param doc The current document. + */ @Override - public void setDocument(int doc) { + public void setDocument(final int doc) { if (lookup != null) { lookup.setDocument(doc); } } + /** + * Sets the current source. + * @param source The current source. + */ @Override - public void setSource(Map source) { + public void setSource(final Map source) { if (lookup != null) { lookup.source().setSource(source); } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java index 3bb5ae463e7..f132d1edf2f 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plan.a; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,6 +17,8 @@ package org.elasticsearch.plan.a; * under the License. */ +package org.elasticsearch.plan.a; + public class Utility { public static boolean NumberToboolean(final Number value) { return value.longValue() != 0; @@ -157,7 +157,7 @@ public class Utility { } public static Character byteToCharacter(final byte value) { - return (char)(byte)value; + return (char)value; } public static Integer byteToInteger(final byte value) { @@ -193,7 +193,7 @@ public class Utility { } public static Character shortToCharacter(final short value) { - return (char)(short)value; + return (char)value; } public static Integer shortToInteger(final short value) { @@ -211,11 +211,11 @@ public class Utility { public static Double shortToDouble(final short value) { return (double)value; } - + public static boolean ShortToboolean(final Short value) { return value != 0; } - + public static char ShortTochar(final Short value) { return (char)value.shortValue(); } @@ -261,19 +261,19 @@ public class Utility { } public static int CharacterToint(final Character value) { - return (int)value; + return value; } public static long CharacterTolong(final Character value) { - return (long)value; + return value; } public static float CharacterTofloat(final Character value) { - return (float)value; + return value; } public static double CharacterTodouble(final Character value) { - return (double)value; + return value; } public static Boolean CharacterToBoolean(final Character value) { @@ -317,7 +317,7 @@ public class Utility { } public static Character intToCharacter(final int value) { - return (char)(int)value; + return (char)value; } public static Long intToLong(final int value) { @@ -331,7 +331,7 @@ public class Utility { public static Double intToDouble(final int value) { return (double)value; } - + public static boolean IntegerToboolean(final Integer value) { return value != 0; } @@ -353,7 +353,7 @@ public class Utility { } public static Character longToCharacter(final long value) { - return (char)(long)value; + return (char)value; } public static Integer longToInteger(final long value) { @@ -367,7 +367,7 @@ public class Utility { public static Double longToDouble(final long value) { return (double)value; } - + public static boolean LongToboolean(final Long value) { return value != 0; } @@ -389,7 +389,7 @@ public class Utility { } public static Character floatToCharacter(final float value) { - return (char)(float)value; + return (char)value; } public static Integer floatToInteger(final float value) { @@ -403,11 +403,11 @@ public class Utility { public static Double floatToDouble(final float value) 
{ return (double)value; } - + public static boolean FloatToboolean(final Float value) { return value != 0; } - + public static char FloatTochar(final Float value) { return (char)value.floatValue(); } @@ -425,7 +425,7 @@ public class Utility { } public static Character doubleToCharacter(final double value) { - return (char)(double)value; + return (char)value; } public static Integer doubleToInteger(final double value) { @@ -435,23 +435,23 @@ public class Utility { public static Long doubleToLong(final double value) { return (long)value; } - + public static Float doubleToFloat(final double value) { return (float)value; } - + public static boolean DoubleToboolean(final Double value) { return value != 0; } - + public static char DoubleTochar(final Double value) { return (char)value.doubleValue(); } - + // although divide by zero is guaranteed, the special overflow case is not caught. // its not needed for remainder because it is not possible there. // see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2 - + /** * Integer divide without overflow * @throws ArithmeticException on overflow or divide-by-zero @@ -462,7 +462,7 @@ public class Utility { } return x / y; } - + /** * Long divide without overflow * @throws ArithmeticException on overflow or divide-by-zero @@ -667,7 +667,7 @@ public class Utility { } return z; } - + /** * Checks for NaN, result is NaN but operands are finite * @throws ArithmeticException if overflow occurred @@ -680,7 +680,7 @@ public class Utility { } return z; } - + /** * Checks for NaN, result is infinite but operands are finite * @throws ArithmeticException if overflow occurred @@ -693,7 +693,7 @@ public class Utility { } return z; } - + /** * Checks for NaN, result is NaN but operands are finite * @throws ArithmeticException if overflow occurred @@ -706,7 +706,7 @@ public class Utility { } return z; } - + /** * Adds two floats but throws {@code ArithmeticException} * if the result overflows. @@ -714,7 +714,7 @@ public class Utility { public static float addWithoutOverflow(float x, float y) { return checkInfFloat(x, y, x + y); } - + /** * Adds two doubles but throws {@code ArithmeticException} * if the result overflows. @@ -722,7 +722,7 @@ public class Utility { public static double addWithoutOverflow(double x, double y) { return checkInfDouble(x, y, x + y); } - + /** * Subtracts two floats but throws {@code ArithmeticException} * if the result overflows. @@ -730,7 +730,7 @@ public class Utility { public static float subtractWithoutOverflow(float x, float y) { return checkInfFloat(x, y, x - y); } - + /** * Subtracts two doubles but throws {@code ArithmeticException} * if the result overflows. @@ -738,7 +738,7 @@ public class Utility { public static double subtractWithoutOverflow(double x, double y) { return checkInfDouble(x, y , x - y); } - + /** * Multiplies two floats but throws {@code ArithmeticException} * if the result overflows. @@ -746,7 +746,7 @@ public class Utility { public static float multiplyWithoutOverflow(float x, float y) { return checkInfFloat(x, y, x * y); } - + /** * Multiplies two doubles but throws {@code ArithmeticException} * if the result overflows. 
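A minimal sketch of the guard pattern behind the float *WithoutOverflow helpers above: finite operands that produce an infinite result signal overflow, and non-NaN operands that produce NaN (e.g. 0/0) signal an invalid operation. The names below mirror the checkInfFloat/checkNaNFloat helpers composed in the diff but are a simplified stand-in, not the Utility methods themselves.

    final class OverflowGuardSketch {
        static float checkInf(final float x, final float y, final float z) {
            // Overflow: both inputs finite, yet the result is infinite.
            if (Float.isFinite(x) && Float.isFinite(y) && Float.isInfinite(z)) {
                throw new ArithmeticException("float overflow");
            }
            return z;
        }

        static float checkNaN(final float x, final float y, final float z) {
            // Invalid operation: neither input is NaN, yet the result is.
            if (!Float.isNaN(x) && !Float.isNaN(y) && Float.isNaN(z)) {
                throw new ArithmeticException("NaN");
            }
            return z;
        }

        static float divideWithoutOverflow(final float x, final float y) {
            return checkNaN(x, y, checkInf(x, y, x / y)); // same composition as in the diff
        }

        public static void main(String[] args) {
            System.out.println(divideWithoutOverflow(1f, 3f)); // fine: 0.33333334
            try {
                divideWithoutOverflow(0f, 0f); // NaN from finite inputs -> throws
            } catch (ArithmeticException e) {
                System.out.println("caught: " + e.getMessage());
            }
        }
    }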
@@ -754,7 +754,7 @@ public class Utility { public static double multiplyWithoutOverflow(double x, double y) { return checkInfDouble(x, y, x * y); } - + /** * Divides two floats but throws {@code ArithmeticException} * if the result overflows, or would create NaN from finite @@ -763,7 +763,7 @@ public class Utility { public static float divideWithoutOverflow(float x, float y) { return checkNaNFloat(x, y, checkInfFloat(x, y, x / y)); } - + /** * Divides two doubles but throws {@code ArithmeticException} * if the result overflows, or would create NaN from finite @@ -772,7 +772,7 @@ public class Utility { public static double divideWithoutOverflow(double x, double y) { return checkNaNDouble(x, y, checkInfDouble(x, y, x / y)); } - + /** * Takes remainder two floats but throws {@code ArithmeticException} * if the result would create NaN from finite inputs ({@code y == 0}) @@ -780,7 +780,7 @@ public class Utility { public static float remainderWithoutOverflow(float x, float y) { return checkNaNFloat(x, y, x % y); } - + /** * Divides two doubles but throws {@code ArithmeticException} * if the result would create NaN from finite inputs ({@code y == 0}) diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java index 4f3361576c4..34b5b535afa 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java @@ -21,6 +21,7 @@ package org.elasticsearch.plan.a; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.script.ScoreAccessor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -34,11 +35,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; -import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; -import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; -import static org.elasticsearch.plan.a.Adapter.StatementMetadata; -import static org.elasticsearch.plan.a.Adapter.error; import static org.elasticsearch.plan.a.Definition.Cast; import static org.elasticsearch.plan.a.Definition.Constructor; import static org.elasticsearch.plan.a.Definition.Field; @@ -46,6 +42,11 @@ import static org.elasticsearch.plan.a.Definition.Method; import static org.elasticsearch.plan.a.Definition.Sort; import static org.elasticsearch.plan.a.Definition.Transform; import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.Metadata.ExpressionMetadata; +import static org.elasticsearch.plan.a.Metadata.ExtNodeMetadata; +import static org.elasticsearch.plan.a.Metadata.ExternalMetadata; +import static org.elasticsearch.plan.a.Metadata.StatementMetadata; +import static org.elasticsearch.plan.a.Metadata.error; import static org.elasticsearch.plan.a.PlanAParser.ADD; import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; @@ -69,6 +70,7 @@ import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; import static org.elasticsearch.plan.a.PlanAParser.DoContext; import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import static org.elasticsearch.plan.a.PlanAParser.EmptyscopeContext; import static 
org.elasticsearch.plan.a.PlanAParser.ExprContext; import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; @@ -103,7 +105,10 @@ import static org.elasticsearch.plan.a.PlanAParser.SUB; import static org.elasticsearch.plan.a.PlanAParser.SingleContext; import static org.elasticsearch.plan.a.PlanAParser.SourceContext; import static org.elasticsearch.plan.a.PlanAParser.StatementContext; +import static org.elasticsearch.plan.a.PlanAParser.ThrowContext; +import static org.elasticsearch.plan.a.PlanAParser.TrapContext; import static org.elasticsearch.plan.a.PlanAParser.TrueContext; +import static org.elasticsearch.plan.a.PlanAParser.TryContext; import static org.elasticsearch.plan.a.PlanAParser.USH; import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; import static org.elasticsearch.plan.a.PlanAParser.WhileContext; @@ -112,18 +117,13 @@ class Writer extends PlanAParserBaseVisitor { private static class Branch { final ParserRuleContext source; - Label begin; - Label end; - Label tru; - Label fals; + Label begin = null; + Label end = null; + Label tru = null; + Label fals = null; private Branch(final ParserRuleContext source) { this.source = source; - - begin = null; - end = null; - tru = null; - fals = null; } } @@ -131,200 +131,206 @@ class Writer extends PlanAParserBaseVisitor { final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPlanAExecutable"; private final static org.objectweb.asm.Type BASE_CLASS_TYPE = org.objectweb.asm.Type.getType(Executable.class); private final static org.objectweb.asm.Type CLASS_TYPE = - org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); + org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); + + private final static org.objectweb.asm.Type PLAN_A_ERROR_TYPE = org.objectweb.asm.Type.getType(PlanAError.class); private final static org.objectweb.asm.commons.Method CONSTRUCTOR = org.objectweb.asm.commons.Method.getMethod( - "void (org.elasticsearch.plan.a.Definition, java.lang.String, java.lang.String)"); + "void (org.elasticsearch.plan.a.Definition, java.lang.String, java.lang.String)"); private final static org.objectweb.asm.commons.Method EXECUTE = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object execute(java.util.Map)"); + "java.lang.Object execute(java.util.Map)"); private final static String SIGNATURE = "(Ljava/util/Map;)Ljava/lang/Object;"; private final static org.objectweb.asm.Type DEFINITION_TYPE = org.objectweb.asm.Type.getType(Definition.class); + private final static org.objectweb.asm.Type MAP_TYPE = org.objectweb.asm.Type.getType(Map.class); + private final static org.objectweb.asm.commons.Method MAP_GET = + org.objectweb.asm.commons.Method.getMethod("Object get(Object)"); + + private final static org.objectweb.asm.Type SCORE_ACCESSOR_TYPE = org.objectweb.asm.Type.getType(ScoreAccessor.class); + private final static org.objectweb.asm.commons.Method SCORE_ACCESSOR_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float floatValue()"); + private final static org.objectweb.asm.commons.Method DEF_METHOD_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object methodCall(java.lang.Object, java.lang.String, " + + "java.lang.Object methodCall(java.lang.Object, java.lang.String, " + "org.elasticsearch.plan.a.Definition, java.lang.Object[], boolean[])"); private final static org.objectweb.asm.commons.Method DEF_ARRAY_STORE = org.objectweb.asm.commons.Method.getMethod( - "void 
arrayStore(java.lang.Object, java.lang.Object, java.lang.Object, " + + "void arrayStore(java.lang.Object, java.lang.Object, java.lang.Object, " + "org.elasticsearch.plan.a.Definition, boolean, boolean)"); private final static org.objectweb.asm.commons.Method DEF_ARRAY_LOAD = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object arrayLoad(java.lang.Object, java.lang.Object, " + + "java.lang.Object arrayLoad(java.lang.Object, java.lang.Object, " + "org.elasticsearch.plan.a.Definition, boolean)"); private final static org.objectweb.asm.commons.Method DEF_FIELD_STORE = org.objectweb.asm.commons.Method.getMethod( - "void fieldStore(java.lang.Object, java.lang.Object, java.lang.String, " + + "void fieldStore(java.lang.Object, java.lang.Object, java.lang.String, " + "org.elasticsearch.plan.a.Definition, boolean)"); private final static org.objectweb.asm.commons.Method DEF_FIELD_LOAD = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.plan.a.Definition)"); + "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.plan.a.Definition)"); private final static org.objectweb.asm.commons.Method DEF_NOT_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object not(java.lang.Object)"); + "java.lang.Object not(java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_NEG_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object neg(java.lang.Object)"); + "java.lang.Object neg(java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_MUL_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object mul(java.lang.Object, java.lang.Object)"); + "java.lang.Object mul(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_DIV_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object div(java.lang.Object, java.lang.Object)"); + "java.lang.Object div(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_REM_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object rem(java.lang.Object, java.lang.Object)"); + "java.lang.Object rem(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_ADD_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object add(java.lang.Object, java.lang.Object)"); + "java.lang.Object add(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_SUB_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object sub(java.lang.Object, java.lang.Object)"); + "java.lang.Object sub(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_LSH_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object lsh(java.lang.Object, java.lang.Object)"); + "java.lang.Object lsh(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_RSH_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object rsh(java.lang.Object, java.lang.Object)"); + "java.lang.Object rsh(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_USH_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object ush(java.lang.Object, java.lang.Object)"); + "java.lang.Object ush(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_AND_CALL = 
org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object and(java.lang.Object, java.lang.Object)"); + "java.lang.Object and(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_XOR_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object xor(java.lang.Object, java.lang.Object)"); + "java.lang.Object xor(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_OR_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object or(java.lang.Object, java.lang.Object)"); + "java.lang.Object or(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_EQ_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean eq(java.lang.Object, java.lang.Object)"); + "boolean eq(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_LT_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean lt(java.lang.Object, java.lang.Object)"); + "boolean lt(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_LTE_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean lte(java.lang.Object, java.lang.Object)"); + "boolean lte(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_GT_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean gt(java.lang.Object, java.lang.Object)"); + "boolean gt(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_GTE_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean gte(java.lang.Object, java.lang.Object)"); + "boolean gte(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.Type STRINGBUILDER_TYPE = org.objectweb.asm.Type.getType(StringBuilder.class); private final static org.objectweb.asm.commons.Method STRINGBUILDER_CONSTRUCTOR = - org.objectweb.asm.commons.Method.getMethod("void ()"); + org.objectweb.asm.commons.Method.getMethod("void ()"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_BOOLEAN = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(boolean)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(boolean)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_CHAR = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(char)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(char)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_INT = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(int)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(int)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_LONG = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(long)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(long)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_FLOAT = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(float)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(float)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(double)"); + 
+        org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(double)");
     private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_STRING =
-            org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.String)");
+        org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.String)");
     private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_OBJECT =
-            org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.Object)");
+        org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.Object)");
     private final static org.objectweb.asm.commons.Method STRINGBUILDER_TOSTRING =
-            org.objectweb.asm.commons.Method.getMethod("java.lang.String toString()");
+        org.objectweb.asm.commons.Method.getMethod("java.lang.String toString()");
     private final static org.objectweb.asm.commons.Method TOINTEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("int toIntExact(long)");
+        org.objectweb.asm.commons.Method.getMethod("int toIntExact(long)");
     private final static org.objectweb.asm.commons.Method NEGATEEXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("int negateExact(int)");
+        org.objectweb.asm.commons.Method.getMethod("int negateExact(int)");
     private final static org.objectweb.asm.commons.Method NEGATEEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("long negateExact(long)");
+        org.objectweb.asm.commons.Method.getMethod("long negateExact(long)");
     private final static org.objectweb.asm.commons.Method MULEXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("int multiplyExact(int, int)");
+        org.objectweb.asm.commons.Method.getMethod("int multiplyExact(int, int)");
     private final static org.objectweb.asm.commons.Method MULEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("long multiplyExact(long, long)");
+        org.objectweb.asm.commons.Method.getMethod("long multiplyExact(long, long)");
     private final static org.objectweb.asm.commons.Method ADDEXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("int addExact(int, int)");
+        org.objectweb.asm.commons.Method.getMethod("int addExact(int, int)");
     private final static org.objectweb.asm.commons.Method ADDEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("long addExact(long, long)");
+        org.objectweb.asm.commons.Method.getMethod("long addExact(long, long)");
     private final static org.objectweb.asm.commons.Method SUBEXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("int subtractExact(int, int)");
+        org.objectweb.asm.commons.Method.getMethod("int subtractExact(int, int)");
     private final static org.objectweb.asm.commons.Method SUBEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("long subtractExact(long, long)");
+        org.objectweb.asm.commons.Method.getMethod("long subtractExact(long, long)");
     private final static org.objectweb.asm.commons.Method CHECKEQUALS =
-            org.objectweb.asm.commons.Method.getMethod("boolean checkEquals(java.lang.Object, java.lang.Object)");
+        org.objectweb.asm.commons.Method.getMethod("boolean checkEquals(java.lang.Object, java.lang.Object)");
     private final static org.objectweb.asm.commons.Method TOBYTEEXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("byte toByteExact(int)");
+        org.objectweb.asm.commons.Method.getMethod("byte toByteExact(int)");
     private final static org.objectweb.asm.commons.Method TOBYTEEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("byte toByteExact(long)");
+        org.objectweb.asm.commons.Method.getMethod("byte toByteExact(long)");
     private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(float)");
+        org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(float)");
     private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(double)");
+        org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(double)");
     private final static org.objectweb.asm.commons.Method TOSHORTEXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("short toShortExact(int)");
+        org.objectweb.asm.commons.Method.getMethod("short toShortExact(int)");
     private final static org.objectweb.asm.commons.Method TOSHORTEXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("short toShortExact(long)");
+        org.objectweb.asm.commons.Method.getMethod("short toShortExact(long)");
     private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(float)");
+        org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(float)");
     private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(double)");
+        org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(double)");
     private final static org.objectweb.asm.commons.Method TOCHAREXACT_INT =
-            org.objectweb.asm.commons.Method.getMethod("char toCharExact(int)");
+        org.objectweb.asm.commons.Method.getMethod("char toCharExact(int)");
     private final static org.objectweb.asm.commons.Method TOCHAREXACT_LONG =
-            org.objectweb.asm.commons.Method.getMethod("char toCharExact(long)");
+        org.objectweb.asm.commons.Method.getMethod("char toCharExact(long)");
     private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(float)");
+        org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(float)");
     private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(double)");
+        org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(double)");
     private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(float)");
+        org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(float)");
     private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(double)");
+        org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(double)");
     private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(float)");
+        org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(float)");
     private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(double)");
+        org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(double)");
     private final static org.objectweb.asm.commons.Method TOFLOATWOOVERFLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("float toFloatWithoutOverflow(double)");
+        org.objectweb.asm.commons.Method.getMethod("float toFloatWithoutOverflow(double)");
     private final static org.objectweb.asm.commons.Method MULWOOVERLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("float multiplyWithoutOverflow(float, float)");
+        org.objectweb.asm.commons.Method.getMethod("float multiplyWithoutOverflow(float, float)");
     private final static org.objectweb.asm.commons.Method MULWOOVERLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("double multiplyWithoutOverflow(double, double)");
+        org.objectweb.asm.commons.Method.getMethod("double multiplyWithoutOverflow(double, double)");
     private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_INT =
-            org.objectweb.asm.commons.Method.getMethod("int divideWithoutOverflow(int, int)");
+        org.objectweb.asm.commons.Method.getMethod("int divideWithoutOverflow(int, int)");
     private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_LONG =
-            org.objectweb.asm.commons.Method.getMethod("long divideWithoutOverflow(long, long)");
+        org.objectweb.asm.commons.Method.getMethod("long divideWithoutOverflow(long, long)");
     private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("float divideWithoutOverflow(float, float)");
+        org.objectweb.asm.commons.Method.getMethod("float divideWithoutOverflow(float, float)");
     private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("double divideWithoutOverflow(double, double)");
+        org.objectweb.asm.commons.Method.getMethod("double divideWithoutOverflow(double, double)");
     private final static org.objectweb.asm.commons.Method REMWOOVERLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("float remainderWithoutOverflow(float, float)");
+        org.objectweb.asm.commons.Method.getMethod("float remainderWithoutOverflow(float, float)");
     private final static org.objectweb.asm.commons.Method REMWOOVERLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("double remainderWithoutOverflow(double, double)");
+        org.objectweb.asm.commons.Method.getMethod("double remainderWithoutOverflow(double, double)");
     private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("float addWithoutOverflow(float, float)");
+        org.objectweb.asm.commons.Method.getMethod("float addWithoutOverflow(float, float)");
     private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("double addWithoutOverflow(double, double)");
+        org.objectweb.asm.commons.Method.getMethod("double addWithoutOverflow(double, double)");
     private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_FLOAT =
-            org.objectweb.asm.commons.Method.getMethod("float subtractWithoutOverflow(float, float)");
+        org.objectweb.asm.commons.Method.getMethod("float subtractWithoutOverflow(float, float)");
     private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_DOUBLE =
-            org.objectweb.asm.commons.Method.getMethod("double subtractWithoutOverflow(double, double)");
+        org.objectweb.asm.commons.Method.getMethod("double subtractWithoutOverflow(double, double)");

-    static byte[] write(Adapter adapter) {
-        Writer writer = new Writer(adapter);
+    static byte[] write(Metadata metadata) {
+        Writer writer = new Writer(metadata);
         return writer.getBytes();
     }
-    private final Adapter adapter;
+    private final Metadata metadata;
     private final Definition definition;
     private final ParseTree root;
     private final String source;
     private final CompilerSettings settings;

-    private final Map branches;
-    private final Deque jumps;
-    private final Set strings;
+    private final Map branches = new HashMap<>();
+    private final Deque jumps = new ArrayDeque<>();
+    private final Set strings = new HashSet<>();

     private ClassWriter writer;
     private GeneratorAdapter execute;

-    private Writer(final Adapter adapter) {
-        this.adapter = adapter;
-        definition = adapter.definition;
-        root = adapter.root;
-        source = adapter.source;
-        settings = adapter.settings;
-
-        branches = new HashMap<>();
-        jumps = new ArrayDeque<>();
-        strings = new HashSet<>();
+    private Writer(final Metadata metadata) {
+        this.metadata = metadata;
+        definition = metadata.definition;
+        root = metadata.root;
+        source = metadata.source;
+        settings = metadata.settings;

         writeBegin();
         writeConstructor();
@@ -377,19 +383,39 @@ class Writer extends PlanAParserBaseVisitor {
     private void writeExecute() {
         final int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC;
         execute = new GeneratorAdapter(access, EXECUTE, SIGNATURE, null, writer);
+
+        final Label fals = new Label();
+        final Label end = new Label();
+
+        execute.visitVarInsn(Opcodes.ALOAD, metadata.inputValueSlot);
+        execute.push("#score");
+        execute.invokeInterface(MAP_TYPE, MAP_GET);
+        execute.dup();
+        execute.ifNull(fals);
+        execute.checkCast(SCORE_ACCESSOR_TYPE);
+        execute.invokeVirtual(SCORE_ACCESSOR_TYPE, SCORE_ACCESSOR_FLOAT);
+        execute.goTo(end);
+        execute.mark(fals);
+        execute.pop();
+        execute.push(0F);
+        execute.mark(end);
+        execute.visitVarInsn(Opcodes.FSTORE, metadata.scoreValueSlot);
+
+        execute.push(settings.getMaxLoopCounter());
+        execute.visitVarInsn(Opcodes.ISTORE, metadata.loopCounterSlot);
+
         visit(root);
         execute.endMethod();
     }

     @Override
     public Void visitSource(final SourceContext ctx) {
-        final StatementMetadata sourcesmd = adapter.getStatementMetadata(ctx);
+        final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx);

         for (final StatementContext sctx : ctx.statement()) {
             visit(sctx);
         }

-        if (!sourcesmd.allReturn) {
+        if (!sourcesmd.methodEscape) {
             execute.visitInsn(Opcodes.ACONST_NULL);
             execute.returnValue();
         }
@@ -408,11 +434,11 @@
         visit(exprctx);

         final BlockContext blockctx0 = ctx.block(0);
-        final StatementMetadata blockmd0 = adapter.getStatementMetadata(blockctx0);
+        final StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0);
         visit(blockctx0);

         if (els) {
-            if (!blockmd0.allExit) {
+            if (!blockmd0.allLast) {
                 execute.goTo(branch.end);
             }
@@ -438,15 +464,20 @@
         visit(exprctx);

         final BlockContext blockctx = ctx.block();
-        boolean allexit = false;
+        boolean allLast = false;

         if (blockctx != null) {
-            StatementMetadata blocksmd = adapter.getStatementMetadata(blockctx);
-            allexit = blocksmd.allExit;
+            final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
+            allLast = blocksmd.allLast;
+            writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1);
             visit(blockctx);
+        } else if (ctx.empty() != null) {
+            writeLoopCounter(1);
+        } else {
+            throw new IllegalStateException(error(ctx) + "Unexpected writer state.");
         }

-        if (!allexit) {
+        if (!allLast) {
             execute.goTo(branch.begin);
         }
@@ -460,23 +491,21 @@
     public Void visitDo(final DoContext ctx) {
         final ExpressionContext exprctx = ctx.expression();
         final Branch branch = markBranch(ctx, exprctx);
+        Label start = new Label();
         branch.begin = new Label();
         branch.end = new Label();
         branch.fals = branch.end;

+        final BlockContext blockctx = ctx.block();
+        final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
+
         jumps.push(branch);
+        execute.mark(start);
+        visit(blockctx);
         execute.mark(branch.begin);
-
-        final BlockContext bctx = ctx.block();
-        final StatementMetadata blocksmd = adapter.getStatementMetadata(bctx);
-        visit(bctx);
-
         visit(exprctx);
-
-        if (!blocksmd.allExit) {
-            execute.goTo(branch.begin);
-        }
-
+        writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1);
+        execute.goTo(start);
         execute.mark(branch.end);
         jumps.pop();
@@ -506,12 +535,24 @@
         }

         final BlockContext blockctx = ctx.block();
-        boolean allexit = false;
+        boolean allLast = false;

         if (blockctx != null) {
-            StatementMetadata blocksmd = adapter.getStatementMetadata(blockctx);
-            allexit = blocksmd.allExit;
+            StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
+            allLast = blocksmd.allLast;
+
+            int count = blocksmd.count > 0 ? blocksmd.count : 1;
+
+            if (atctx != null) {
+                ++count;
+            }
+
+            writeLoopCounter(count);
             visit(blockctx);
+        } else if (ctx.empty() != null) {
+            writeLoopCounter(1);
+        } else {
+            throw new IllegalStateException(error(ctx) + "Unexpected writer state.");
        }

         if (atctx != null) {
@@ -519,7 +560,7 @@
             visit(atctx);
         }

-        if (atctx != null || !allexit) {
+        if (atctx != null || !allLast) {
             execute.goTo(start);
         }
@@ -560,14 +601,56 @@
         return null;
     }

+    @Override
+    public Void visitTry(final TryContext ctx) {
+        final TrapContext[] trapctxs = new TrapContext[ctx.trap().size()];
+        ctx.trap().toArray(trapctxs);
+        final Branch branch = markBranch(ctx, trapctxs);
+
+        Label end = new Label();
+        branch.begin = new Label();
+        branch.end = new Label();
+        branch.tru = trapctxs.length > 1 ? end : null;
+
+        execute.mark(branch.begin);
+
+        final BlockContext blockctx = ctx.block();
+        final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx);
+        visit(blockctx);
+
+        if (!blocksmd.allLast) {
+            execute.goTo(end);
+        }
+
+        execute.mark(branch.end);
+
+        for (final TrapContext trapctx : trapctxs) {
+            visit(trapctx);
+        }
+
+        if (!blocksmd.allLast || trapctxs.length > 1) {
+            execute.mark(end);
+        }
+
+        return null;
+    }
+
+    @Override
+    public Void visitThrow(final ThrowContext ctx) {
+        visit(ctx.expression());
+        execute.throwException();
+
+        return null;
+    }
+
     @Override
     public Void visitExpr(final ExprContext ctx) {
-        final StatementMetadata exprsmd = adapter.getStatementMetadata(ctx);
+        final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx);
         final ExpressionContext exprctx = ctx.expression();
-        final ExpressionMetadata expremd = adapter.getExpressionMetadata(exprctx);
+        final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx);
         visit(exprctx);

-        if (exprsmd.allReturn) {
+        if (exprsmd.methodEscape) {
             execute.returnValue();
         } else {
             writePop(expremd.to.type.getSize());
         }
@@ -605,7 +688,9 @@
         if (declctx != null) {
             visit(declctx);
         } else if (exprctx != null) {
+            final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx);
             visit(exprctx);
+            writePop(expremd.to.type.getSize());
         } else {
             throw new IllegalStateException(error(ctx) + "Unexpected writer state.");
         }
@@ -615,7 +700,10 @@
     @Override
     public Void visitAfterthought(AfterthoughtContext ctx) {
+        final ExpressionContext exprctx = ctx.expression();
+        final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx);
         visit(ctx.expression());
+        writePop(expremd.to.type.getSize());

         return null;
     }
@@ -636,7 +724,7 @@
     @Override
     public Void visitDeclvar(final DeclvarContext ctx) {
-        final ExpressionMetadata declvaremd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx);
         final org.objectweb.asm.Type type = declvaremd.to.type;
         final Sort sort = declvaremd.to.sort;
         final int slot = (int)declvaremd.postConst;
@@ -666,6 +754,34 @@
         return null;
     }

+    @Override
+    public Void visitTrap(final TrapContext ctx) {
+        final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx);
+
+        final Branch branch = getBranch(ctx);
+        final Label jump = new Label();
+
+        final BlockContext blockctx = ctx.block();
+        final EmptyscopeContext emptyctx = ctx.emptyscope();
+
+        execute.mark(jump);
+        writeLoadStoreVariable(ctx, true, trapsmd.exception, trapsmd.slot);
+
+        if (blockctx != null) {
+            visit(ctx.block());
+        } else if (emptyctx == null) {
+            throw new IllegalStateException(error(ctx) + "Unexpected writer state.");
+        }
+
+        execute.visitTryCatchBlock(branch.begin, branch.end, jump, trapsmd.exception.type.getInternalName());
+
+        if (branch.tru != null && !trapsmd.allLast) {
+            execute.goTo(branch.tru);
+        }
+
+        return null;
+    }
+
     @Override
     public Void visitPrecedence(final PrecedenceContext ctx) {
         throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state.");
@@ -673,7 +789,7 @@
     @Override
     public Void visitNumeric(final NumericContext ctx) {
-        final ExpressionMetadata numericemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = numericemd.postConst;

         if (postConst == null) {
@@ -690,7 +806,7 @@
     @Override
     public Void visitChar(final CharContext ctx) {
-        final ExpressionMetadata charemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = charemd.postConst;

         if (postConst == null) {
@@ -707,7 +823,7 @@
     @Override
     public Void visitTrue(final TrueContext ctx) {
-        final ExpressionMetadata trueemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = trueemd.postConst;
         final Branch branch = getBranch(ctx);
@@ -727,7 +843,7 @@
     @Override
     public Void visitFalse(final FalseContext ctx) {
-        final ExpressionMetadata falseemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = falseemd.postConst;
         final Branch branch = getBranch(ctx);
@@ -747,7 +863,7 @@
     @Override
     public Void visitNull(final NullContext ctx) {
-        final ExpressionMetadata nullemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx);

         execute.visitInsn(Opcodes.ACONST_NULL);
         checkWriteCast(nullemd);
@@ -758,7 +874,7 @@
     @Override
     public Void visitExternal(final ExternalContext ctx) {
-        final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
         visit(ctx.extstart());
         checkWriteCast(expremd);
         checkWriteBranch(ctx);
@@ -769,7 +885,7 @@
     @Override
     public Void visitPostinc(final PostincContext ctx) {
-        final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
         visit(ctx.extstart());
         checkWriteCast(expremd);
         checkWriteBranch(ctx);
@@ -779,7 +895,7 @@
     @Override
     public Void visitPreinc(final PreincContext ctx) {
-        final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
         visit(ctx.extstart());
         checkWriteCast(expremd);
         checkWriteBranch(ctx);
@@ -789,7 +905,7 @@
     @Override
     public Void visitUnary(final UnaryContext ctx) {
-        final ExpressionMetadata unaryemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = unaryemd.postConst;
         final Object preConst = unaryemd.preConst;
         final Branch branch = getBranch(ctx);
@@ -891,7 +1007,7 @@
     @Override
     public Void visitCast(final CastContext ctx) {
-        final ExpressionMetadata castemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = castemd.postConst;

         if (postConst == null) {
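The new writeExecute() prologue above generates two pieces of setup before the script body runs: it resolves the "#score" entry of the input map, and it seeds a local slot with settings.getMaxLoopCounter() for the loop guards below. In rough Java terms (a sketch, not the generated bytecode; Number stands in for the actual score-accessor type behind SCORE_ACCESSOR_TYPE / SCORE_ACCESSOR_FLOAT):

    // Equivalent of the "#score" preamble: fetch the score accessor from the
    // script's input map, defaulting to 0 when there is no scoring context.
    static float readScore(java.util.Map<String, Object> input) {
        Object accessor = input.get("#score");
        return accessor == null ? 0F : ((Number) accessor).floatValue();
    }

The second half of the prologue is simply `int loopCounter = settings.getMaxLoopCounter();` stored in metadata.loopCounterSlot, the shared budget every loop in the script draws down.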
@@ -908,7 +1024,7 @@
     @Override
     public Void visitBinary(final BinaryContext ctx) {
-        final ExpressionMetadata binaryemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = binaryemd.postConst;
         final Object preConst = binaryemd.preConst;
         final Branch branch = getBranch(ctx);
@@ -930,7 +1046,7 @@
         }

         final ExpressionContext exprctx0 = ctx.expression(0);
-        final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(exprctx0);
+        final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0);
         strings.add(exprctx0);
         visit(exprctx0);
@@ -940,7 +1056,7 @@
         }

         final ExpressionContext exprctx1 = ctx.expression(1);
-        final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(exprctx1);
+        final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1);
         strings.add(exprctx1);
         visit(exprctx1);
@@ -990,7 +1106,7 @@
     @Override
     public Void visitComp(final CompContext ctx) {
-        final ExpressionMetadata compemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = compemd.postConst;
         final Object preConst = compemd.preConst;
         final Branch branch = getBranch(ctx);
@@ -1014,10 +1130,10 @@
             }
         } else {
             final ExpressionContext exprctx0 = ctx.expression(0);
-            final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(exprctx0);
+            final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0);

             final ExpressionContext exprctx1 = ctx.expression(1);
-            final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(exprctx1);
+            final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1);
             final org.objectweb.asm.Type type = expremd1.to.type;
             final Sort sort1 = expremd1.to.sort;
@@ -1033,9 +1149,9 @@
             final Label end = new Label();

             final boolean eq = (ctx.EQ() != null || ctx.EQR() != null) && (tru || !fals) ||
-                    (ctx.NE() != null || ctx.NER() != null) && fals;
+                (ctx.NE() != null || ctx.NER() != null) && fals;
             final boolean ne = (ctx.NE() != null || ctx.NER() != null) && (tru || !fals) ||
-                    (ctx.EQ() != null || ctx.EQR() != null) && fals;
+                (ctx.EQ() != null || ctx.EQR() != null) && fals;
             final boolean lt = ctx.LT() != null && (tru || !fals) || ctx.GTE() != null && fals;
             final boolean lte = ctx.LTE() != null && (tru || !fals) || ctx.GT() != null && fals;
             final boolean gt = ctx.GT() != null && (tru || !fals) || ctx.LTE() != null && fals;
@@ -1156,7 +1272,7 @@
     @Override
     public Void visitBool(final BoolContext ctx) {
-        final ExpressionMetadata boolemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx);
         final Object postConst = boolemd.postConst;
         final Object preConst = boolemd.preConst;
         final Branch branch = getBranch(ctx);
@@ -1255,7 +1371,7 @@
     @Override
     public Void visitConditional(final ConditionalContext ctx) {
-        final ExpressionMetadata condemd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx);
         final Branch branch = getBranch(ctx);

         final ExpressionContext expr0 = ctx.expression(0);
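visitBinary's string handling above adds both operands to the strings set so that a chain of + over strings collapses into one StringBuilder pipeline built from the STRINGBUILDER_* handles at the top of the file. A sketch of the shape the emitted code corresponds to (illustrative; the analyzer has already typed the operands by this point):

    // What a Plan A expression like  a + x + "!"  lowers to, conceptually:
    // one builder, one append per operand (STRINGBUILDER_APPEND_STRING,
    // STRINGBUILDER_APPEND_DOUBLE, ...), one final STRINGBUILDER_TOSTRING.
    static String concat(String a, double x) {
        return new StringBuilder().append(a).append(x).append("!").toString();
    }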
@@ -1286,7 +1402,7 @@
     @Override
     public Void visitAssignment(final AssignmentContext ctx) {
-        final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx);
         visit(ctx.extstart());
         checkWriteCast(expremd);
         checkWriteBranch(ctx);
@@ -1296,10 +1412,10 @@
     @Override
     public Void visitExtstart(ExtstartContext ctx) {
-        final ExternalMetadata startemd = adapter.getExternalMetadata(ctx);
+        final ExternalMetadata startemd = metadata.getExternalMetadata(ctx);

         if (startemd.token == ADD) {
-            final ExpressionMetadata storeemd = adapter.getExpressionMetadata(startemd.storeExpr);
+            final ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr);

             if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) {
                 writeNewStrings();
@@ -1372,7 +1488,7 @@
     @Override
     public Void visitExtcast(final ExtcastContext ctx) {
-        ExtNodeMetadata castenmd = adapter.getExtNodeMetadata(ctx);
+        ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx);

         final ExtprecContext precctx = ctx.extprec();
         final ExtcastContext castctx = ctx.extcast();
@@ -1404,7 +1520,7 @@
     @Override
     public Void visitExtbrace(final ExtbraceContext ctx) {
-        final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression());
+        final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression());

         visit(exprctx);
         writeLoadStoreExternal(ctx);
@@ -1508,7 +1624,7 @@
     @Override
     public Void visitExtstring(ExtstringContext ctx) {
-        final ExtNodeMetadata stringenmd = adapter.getExtNodeMetadata(ctx);
+        final ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx);

         writeConstant(ctx, stringenmd.target);
@@ -1531,7 +1647,7 @@
     @Override
     public Void visitIncrement(IncrementContext ctx) {
-        final ExpressionMetadata incremd = adapter.getExpressionMetadata(ctx);
+        final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx);
         final Object postConst = incremd.postConst;

         if (postConst == null) {
@@ -1546,6 +1662,18 @@
         return null;
     }

+    private void writeLoopCounter(final int count) {
+        final Label end = new Label();
+
+        execute.iinc(metadata.loopCounterSlot, -count);
+        execute.visitVarInsn(Opcodes.ILOAD, metadata.loopCounterSlot);
+        execute.push(0);
+        execute.ifICmp(GeneratorAdapter.GT, end);
+        execute.throwException(PLAN_A_ERROR_TYPE,
+            "The maximum number of statements that can be executed in a loop has been reached.");
+        execute.mark(end);
+    }
+
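writeLoopCounter above is the whole loop-protection mechanism: each loop entry decrements a shared counter by the body's statement count and bails out once the budget is spent. As plain Java (a sketch; the real exception is whatever class PLAN_A_ERROR_TYPE resolves to):

    // One guard per loop entry/iteration; "count" is the body's statement count
    // (minimum 1), so the budget drains roughly per statement executed.
    static int checkLoopBudget(int loopCounter, int count) {
        loopCounter -= count;
        if (loopCounter <= 0) {
            // RuntimeException stands in for the generated Plan A error type.
            throw new RuntimeException(
                "The maximum number of statements that can be executed in a loop has been reached.");
        }
        return loopCounter;
    }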
     private void writeConstant(final ParserRuleContext source, final Object constant) {
         if (constant instanceof Number) {
             writeNumeric(source, constant);
@@ -1618,12 +1746,12 @@
     private void writeBinaryInstruction(final ParserRuleContext source, final Type type, final int token) {
         final Sort sort = type.sort;
         final boolean exact = !settings.getNumericOverflow() &&
-            ((sort == Sort.INT || sort == Sort.LONG) &&
+                ((sort == Sort.INT || sort == Sort.LONG) &&
                 (token == MUL || token == DIV || token == ADD || token == SUB) ||
                 (sort == Sort.FLOAT || sort == Sort.DOUBLE) &&
-            (token == MUL || token == DIV || token == REM || token == ADD || token == SUB));
+                (token == MUL || token == DIV || token == REM || token == ADD || token == SUB));

-        // if its a 64-bit shift, fixup the last argument to truncate to 32-bits
+        // if its a 64-bit shift, fixup the lastSource argument to truncate to 32-bits
         // note unlike java, this means we still do binary promotion of shifts,
         // but it keeps things simple -- this check works because we promote shifts.
         if (sort == Sort.LONG && (token == LSH || token == USH || token == RSH)) {
@@ -1683,7 +1811,7 @@
             }
         } else {
             if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) &&
-                (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) {
+                    (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) {
                 throw new IllegalStateException(error(source) + "Unexpected writer state.");
             }
@@ -1731,122 +1859,122 @@
      * @return true if an instruction is written, false otherwise
      */
     private boolean writeExactInstruction(final Sort osort, final Sort psort) {
-        if (psort == Sort.DOUBLE) {
-            if (osort == Sort.FLOAT) {
-                execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
-            } else if (osort == Sort.FLOAT_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
-                execute.checkCast(definition.floatobjType.type);
-            } else if (osort == Sort.LONG) {
-                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
-            } else if (osort == Sort.LONG_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
-                execute.checkCast(definition.longobjType.type);
-            } else if (osort == Sort.INT) {
-                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
-            } else if (osort == Sort.INT_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
-                execute.checkCast(definition.intobjType.type);
-            } else if (osort == Sort.CHAR) {
-                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
-            } else if (osort == Sort.CHAR_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
-                execute.checkCast(definition.charobjType.type);
-            } else if (osort == Sort.SHORT) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
-            } else if (osort == Sort.SHORT_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
-                execute.checkCast(definition.shortobjType.type);
-            } else if (osort == Sort.BYTE) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
-            } else if (osort == Sort.BYTE_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
-                execute.checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
-        } else if (psort == Sort.FLOAT) {
-            if (osort == Sort.LONG) {
-                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
-            } else if (osort == Sort.LONG_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
-                execute.checkCast(definition.longobjType.type);
-            } else if (osort == Sort.INT) {
-                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
-            } else if (osort == Sort.INT_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
-                execute.checkCast(definition.intobjType.type);
-            } else if (osort == Sort.CHAR) {
-                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
-            } else if (osort == Sort.CHAR_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
-                execute.checkCast(definition.charobjType.type);
-            } else if (osort == Sort.SHORT) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
-                execute.checkCast(definition.shortobjType.type);
-            } else if (osort == Sort.BYTE) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
-            } else if (osort == Sort.BYTE_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
-                execute.checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
-        } else if (psort == Sort.LONG) {
-            if (osort == Sort.INT) {
-                execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
-            } else if (osort == Sort.INT_OBJ) {
-                execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
-                execute.checkCast(definition.intobjType.type);
-            } else if (osort == Sort.CHAR) {
-                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
-            } else if (osort == Sort.CHAR_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
-                execute.checkCast(definition.charobjType.type);
-            } else if (osort == Sort.SHORT) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
-            } else if (osort == Sort.SHORT_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
-                execute.checkCast(definition.shortobjType.type);
-            } else if (osort == Sort.BYTE) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
-            } else if (osort == Sort.BYTE_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
-                execute.checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
-        } else if (psort == Sort.INT) {
-            if (osort == Sort.CHAR) {
-                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
-            } else if (osort == Sort.CHAR_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
-                execute.checkCast(definition.charobjType.type);
-            } else if (osort == Sort.SHORT) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
-            } else if (osort == Sort.SHORT_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
-                execute.checkCast(definition.shortobjType.type);
-            } else if (osort == Sort.BYTE) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
-            } else if (osort == Sort.BYTE_OBJ) {
-                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
-                execute.checkCast(definition.byteobjType.type);
-            } else {
-                return false;
-            }
         } else {
             return false;
         }
+        if (psort == Sort.DOUBLE) {
+            if (osort == Sort.FLOAT) {
+                execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
+            } else if (osort == Sort.FLOAT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE);
+                execute.checkCast(definition.floatobjType.type);
+            } else if (osort == Sort.LONG) {
+                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
+            } else if (osort == Sort.LONG_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE);
+                execute.checkCast(definition.longobjType.type);
+            } else if (osort == Sort.INT) {
+                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
+            } else if (osort == Sort.INT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE);
+                execute.checkCast(definition.intobjType.type);
+            } else if (osort == Sort.CHAR) {
+                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
+            } else if (osort == Sort.CHAR_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE);
+                execute.checkCast(definition.charobjType.type);
+            } else if (osort == Sort.SHORT) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
+            } else if (osort == Sort.SHORT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE);
+                execute.checkCast(definition.shortobjType.type);
+            } else if (osort == Sort.BYTE) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
+            } else if (osort == Sort.BYTE_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE);
+                execute.checkCast(definition.byteobjType.type);
+            } else {
+                return false;
+            }
+        } else if (psort == Sort.FLOAT) {
+            if (osort == Sort.LONG) {
+                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
+            } else if (osort == Sort.LONG_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT);
+                execute.checkCast(definition.longobjType.type);
+            } else if (osort == Sort.INT) {
+                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
+            } else if (osort == Sort.INT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT);
+                execute.checkCast(definition.intobjType.type);
+            } else if (osort == Sort.CHAR) {
+                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
+            } else if (osort == Sort.CHAR_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT);
+                execute.checkCast(definition.charobjType.type);
+            } else if (osort == Sort.SHORT) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
+            } else if (osort == Sort.SHORT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT);
+                execute.checkCast(definition.shortobjType.type);
+            } else if (osort == Sort.BYTE) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
+            } else if (osort == Sort.BYTE_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT);
+                execute.checkCast(definition.byteobjType.type);
+            } else {
+                return false;
+            }
+        } else if (psort == Sort.LONG) {
+            if (osort == Sort.INT) {
+                execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
+            } else if (osort == Sort.INT_OBJ) {
+                execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG);
+                execute.checkCast(definition.intobjType.type);
+            } else if (osort == Sort.CHAR) {
+                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
+            } else if (osort == Sort.CHAR_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG);
+                execute.checkCast(definition.charobjType.type);
+            } else if (osort == Sort.SHORT) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
+            } else if (osort == Sort.SHORT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG);
+                execute.checkCast(definition.shortobjType.type);
+            } else if (osort == Sort.BYTE) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
+            } else if (osort == Sort.BYTE_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG);
+                execute.checkCast(definition.byteobjType.type);
+            } else {
+                return false;
+            }
+        } else if (psort == Sort.INT) {
+            if (osort == Sort.CHAR) {
+                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
+            } else if (osort == Sort.CHAR_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT);
+                execute.checkCast(definition.charobjType.type);
+            } else if (osort == Sort.SHORT) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
+            } else if (osort == Sort.SHORT_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT);
+                execute.checkCast(definition.shortobjType.type);
+            } else if (osort == Sort.BYTE) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
+            } else if (osort == Sort.BYTE_OBJ) {
+                execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT);
+                execute.checkCast(definition.byteobjType.type);
+            } else {
+                return false;
+            }
+        } else {
+            return false;
+        }

         return true;
     }

     private void writeLoadStoreExternal(final ParserRuleContext source) {
-        final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source);
-        final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceenmd.parent);
+        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);
+        final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent);

         final boolean length = "#length".equals(sourceenmd.target);
         final boolean array = "#brace".equals(sourceenmd.target);
@@ -1868,7 +1996,7 @@
         if (length) {
             execute.arrayLength();
         } else if (sourceenmd.last && parentemd.storeExpr != null) {
-            final ExpressionMetadata expremd = adapter.getExpressionMetadata(parentemd.storeExpr);
+            final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr);
             final boolean cat = strings.contains(parentemd.storeExpr);

             if (cat) {
@@ -1934,7 +2062,7 @@
         boolean exact = false;

         if (!settings.getNumericOverflow() && expremd.typesafe && sourceenmd.type.sort != Sort.DEF &&
-            (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) {
+                (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) {
             exact = writeExactInstruction(sourceenmd.type.sort, sourceenmd.promote.sort);
         }
@@ -1973,7 +2101,7 @@
                                              final boolean store, final boolean variable,
                                              final boolean field, final boolean name,
                                              final boolean array, final boolean shortcut) {
-        final ExtNodeMetadata sourceemd = adapter.getExtNodeMetadata(source);
+        final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source);

         if (variable) {
             writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target);
@@ -2030,9 +2158,9 @@
     private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) {
         if (store) {
-            final ExtNodeMetadata sourceemd = adapter.getExtNodeMetadata(source);
-            final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceemd.parent);
-            final ExpressionMetadata expremd = adapter.getExpressionMetadata(parentemd.storeExpr);
+            final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source);
+            final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceemd.parent);
+            final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr);

             execute.push(name);
             execute.loadThis();
@@ -2054,12 +2182,12 @@
         if (type.sort == Sort.DEF) {
             final ExtbraceContext bracectx = (ExtbraceContext)source;
-            final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(bracectx.expression());
+            final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(bracectx.expression());

             if (store) {
-                final ExtNodeMetadata braceenmd = adapter.getExtNodeMetadata(bracectx);
-                final ExternalMetadata parentemd = adapter.getExternalMetadata(braceenmd.parent);
-                final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(parentemd.storeExpr);
+                final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(bracectx);
+                final ExternalMetadata parentemd = metadata.getExternalMetadata(braceenmd.parent);
+                final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(parentemd.storeExpr);

                 execute.loadThis();
                 execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE);
@@ -2118,8 +2246,8 @@
     }

     private void writeNewExternal(final ExtnewContext source) {
-        final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source);
-        final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceenmd.parent);
+        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);
+        final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent);

         final boolean makearray = "#makearray".equals(sourceenmd.target);
         final boolean constructor = sourceenmd.target instanceof Constructor;
@@ -2155,7 +2283,7 @@
     }

     private void writeCallExternal(final ExtcallContext source) {
-        final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source);
+        final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source);

         final boolean method = sourceenmd.target instanceof Method;
         final boolean def = sourceenmd.target instanceof String;
@@ -2205,7 +2333,7 @@
         for (int argument = 0; argument < arguments.size(); ++argument) {
             execute.dup();
             execute.push(argument);
-            execute.push(adapter.getExpressionMetadata(arguments.get(argument)).typesafe);
+            execute.push(metadata.getExpressionMetadata(arguments.get(argument)).typesafe);
             execute.arrayStore(definition.booleanType.type);
         }
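writeExactInstruction, reflowed above, picks a checked conversion per (promoted sort, original sort) pair whenever numeric overflow is disallowed. The JDK's Math.toIntExact is the model the Utility to*Exact/to*WithoutOverflow helpers follow; a sketch of the contrast:

    static int narrowChecked(long value) {
        return Math.toIntExact(value);  // throws ArithmeticException if it doesn't fit
    }

    static int narrowUnchecked(long value) {
        return (int) value;             // default JVM behavior: silent truncation
    }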
diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java
new file mode 100644
index 00000000000..d81c4029a79
--- /dev/null
+++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.plan.a;
+
+public class BasicAPITests extends ScriptTestCase {
+
+    public void testListIterator() {
+        assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " +
+            "int total = 0; while (y.hasNext()) total += y.next(); return total;"));
+        assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " +
+            "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;"));
+        assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " +
+            "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;"));
+        assertEquals(3, exec("def x = new ArrayList(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " +
+            "def total = 0; while (y.hasNext()) total += y.next(); return total;"));
+    }
+
+    public void testSetIterator() {
+        assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " +
+            "int total = 0; while (y.hasNext()) total += y.next(); return total;"));
+        assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " +
+            "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;"));
+        assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " +
+            "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;"));
+        assertEquals(3, exec("def x = new HashSet(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " +
+            "def total = 0; while (y.hasNext()) total += (int)y.next(); return total;"));
+    }
+
+    public void testMapIterator() {
+        assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.keySet().iterator(); " +
+            "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;"));
+        assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.values().iterator(); " +
+            "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;"));
+    }
+}
fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivisionConst() throws Exception { try { exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;"); @@ -230,7 +230,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivisionNaN() throws Exception { // float division, constant division, and assignment try { @@ -245,7 +245,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = 0f; x /= 0f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double division, constant division, and assignment try { exec("double x = 0.0; double y = 0.0; return x / y;"); @@ -260,7 +260,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testRemainderNaN() throws Exception { // float division, constant division, and assignment try { @@ -275,7 +275,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = 1f; x %= 0f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double division, constant division, and assignment try { exec("double x = 1.0; double y = 0.0; return x % y;"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java index 02a738de71e..c858e211faa 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java @@ -24,49 +24,49 @@ import java.util.Map; /** Tests floating point overflow with numeric overflow enabled */ public class FloatOverflowEnabledTests extends ScriptTestCase { - + /** wire overflow to true for all tests */ @Override public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); } - public void testAssignmentAdditionOverflow() { + public void testAssignmentAdditionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x += -1.7976931348623157E308; return x;")); } - - public void testAssignmentSubtractionOverflow() { + + public void testAssignmentSubtractionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; 
return x;")); } - + public void testAssignmentMultiplicationOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;")); } - + public void testAssignmentDivisionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;")); assertEquals(Float.POSITIVE_INFINITY, exec("float x = 1.0f; x /= 0.0f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;")); @@ -74,32 +74,32 @@ public class FloatOverflowEnabledTests extends ScriptTestCase { } public void testAddition() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;")); + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;")); } - + public void testAdditionConst() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;")); + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 + 1.7976931348623157E308;")); } - + public void testSubtraction() throws Exception { - assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;")); } - + public void testSubtractionConst() throws Exception { - assertEquals(Float.NEGATIVE_INFINITY, exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;")); assertEquals(Double.NEGATIVE_INFINITY, exec("return -1.7976931348623157E308 - 1.7976931348623157E308;")); } - + public void testMultiplication() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;")); + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;")); } - + public void testMultiplicationConst() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f * 
3.4028234663852886E38f;")); + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 * 1.7976931348623157E308;")); } @@ -109,32 +109,32 @@ public class FloatOverflowEnabledTests extends ScriptTestCase { assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.0; double y = 0.0; return x / y;")); } - + public void testDivisionConst() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;")); assertEquals(Float.POSITIVE_INFINITY, exec("return 1.0f / 0.0f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 / 4.9E-324;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.0 / 0.0;")); } - + public void testDivisionNaN() throws Exception { // float division, constant division, and assignment assertTrue(Float.isNaN((Float) exec("float x = 0f; float y = 0f; return x / y;"))); assertTrue(Float.isNaN((Float) exec("return 0f / 0f;"))); assertTrue(Float.isNaN((Float) exec("float x = 0f; x /= 0f; return x;"))); - + // double division, constant division, and assignment assertTrue(Double.isNaN((Double) exec("double x = 0.0; double y = 0.0; return x / y;"))); assertTrue(Double.isNaN((Double) exec("return 0.0 / 0.0;"))); assertTrue(Double.isNaN((Double) exec("double x = 0.0; x /= 0.0; return x;"))); } - + public void testRemainderNaN() throws Exception { // float division, constant division, and assignment assertTrue(Float.isNaN((Float) exec("float x = 1f; float y = 0f; return x % y;"))); assertTrue(Float.isNaN((Float) exec("return 1f % 0f;"))); assertTrue(Float.isNaN((Float) exec("float x = 1f; x %= 0f; return x;"))); - + // double division, constant division, and assignment assertTrue(Double.isNaN((Double) exec("double x = 1.0; double y = 0.0; return x % y;"))); assertTrue(Double.isNaN((Double) exec("return 1.0 % 0.0;"))); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java index dbffb11f0d0..86b4f46a765 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java @@ -24,11 +24,11 @@ import java.util.Map; /** Tests integer overflow with numeric overflow disabled */ public class IntegerOverflowDisabledTests extends ScriptTestCase { - + /** wire overflow to true for all tests */ @Override public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); } public void testAssignmentAdditionOverflow() { @@ -42,7 +42,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x += -129; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // short try { exec("short x = 0; x += 32768; return x;"); @@ -53,7 +53,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x += -32769; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // char try { 
exec("char x = 0; x += 65536; return x;"); @@ -64,7 +64,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("char x = 0; x += -65536; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 1; x += 2147483647; return x;"); @@ -75,7 +75,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = -2; x += -2147483647; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 1; x += 9223372036854775807L; return x;"); @@ -87,7 +87,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentSubtractionOverflow() { // byte try { @@ -99,7 +99,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x -= 129; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // short try { exec("short x = 0; x -= -32768; return x;"); @@ -110,7 +110,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x -= 32769; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 0; x -= -65536; return x;"); @@ -121,7 +121,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("char x = 0; x -= 65536; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 1; x -= -2147483647; return x;"); @@ -132,7 +132,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = -2; x -= 2147483647; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 1; x -= -9223372036854775807L; return x;"); @@ -144,7 +144,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentMultiplicationOverflow() { // byte try { @@ -156,7 +156,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 2; x *= -128; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 2; x *= 65536; return x;"); @@ -167,7 +167,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("char x = 2; x *= -65536; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 2; x *= 2147483647; return x;"); @@ -178,7 +178,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = 2; x *= -2147483647; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 2; x *= 9223372036854775807L; return x;"); @@ -190,7 +190,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentDivisionOverflow() { // byte try { @@ -203,108 +203,108 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("short x = (short) -32768; x /= -1; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // cannot happen for char: unsigned - + // 
int try { exec("int x = -2147483647 - 1; x /= -1; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testIncrementOverFlow() throws Exception { // byte try { exec("byte x = 127; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("byte x = 127; x++; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("byte x = (byte) -128; --x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("byte x = (byte) -128; x--; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // short try { exec("short x = 32767; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("short x = 32767; x++; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("short x = (short) -32768; --x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("short x = (short) -32768; x--; return x;"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 65535; ++x; return x;"); } catch (ArithmeticException expected) {} - + try { exec("char x = 65535; x++; return x;"); } catch (ArithmeticException expected) {} - + try { exec("char x = (char) 0; --x; return x;"); } catch (ArithmeticException expected) {} - + try { exec("char x = (char) 0; x--; return x;"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 2147483647; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("int x = 2147483647; x++; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("int x = (int) -2147483648L; --x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("int x = (int) -2147483648L; x--; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 9223372036854775807L; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = 9223372036854775807L; x++; return x;"); fail("did not get expected exception"); @@ -320,68 +320,68 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAddition() throws Exception { try { exec("int x = 2147483647; int y = 2147483647; return x + y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testAdditionConst() throws Exception { try { exec("return 2147483647 + 2147483647;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("return 9223372036854775807L + 9223372036854775807L;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - - + + public void testSubtraction() throws 
Exception { try { exec("int x = -10; int y = 2147483647; return x - y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -10L; long y = 9223372036854775807L; return x - y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testSubtractionConst() throws Exception { try { exec("return -10 - 2147483647;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("return -10L - 9223372036854775807L;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplication() throws Exception { try { exec("int x = 2147483647; int y = 2147483647; return x * y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplicationConst() throws Exception { try { exec("return 2147483647 * 2147483647;"); @@ -399,13 +399,13 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = -2147483647 - 1; int y = -1; return x / y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -9223372036854775808L; long y = -1L; return x / y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testDivisionConst() throws Exception { try { exec("return (-2147483648) / -1;"); @@ -417,25 +417,25 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testNegationOverflow() throws Exception { try { exec("int x = -2147483648; x = -x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -9223372036854775808L; x = -x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testNegationOverflowConst() throws Exception { try { exec("int x = -(-2147483648); return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -(-9223372036854775808L); return x;"); fail("did not get expected exception"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java index cdab0e89fe6..ab5f82fd6d2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java @@ -24,148 +24,148 @@ import java.util.Map; /** Tests integer overflow with numeric overflow enabled */ public class IntegerOverflowEnabledTests extends ScriptTestCase { - + /** wire overflow to true for all tests */ @Override public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); } public void testAssignmentAdditionOverflow() { // byte assertEquals((byte)(0 + 128), exec("byte x = 0; x += 128; return x;")); assertEquals((byte)(0 + -129), exec("byte x = 0; x += -129; return x;")); - + // short assertEquals((short)(0 + 32768), 
exec("short x = 0; x += 32768; return x;")); assertEquals((short)(0 + -32769), exec("short x = 0; x += -32769; return x;")); - + // char assertEquals((char)(0 + 65536), exec("char x = 0; x += 65536; return x;")); assertEquals((char)(0 + -65536), exec("char x = 0; x += -65536; return x;")); - + // int assertEquals(1 + 2147483647, exec("int x = 1; x += 2147483647; return x;")); assertEquals(-2 + -2147483647, exec("int x = -2; x += -2147483647; return x;")); - + // long assertEquals(1L + 9223372036854775807L, exec("long x = 1; x += 9223372036854775807L; return x;")); assertEquals(-2L + -9223372036854775807L, exec("long x = -2; x += -9223372036854775807L; return x;")); } - + public void testAssignmentSubtractionOverflow() { // byte assertEquals((byte)(0 - -128), exec("byte x = 0; x -= -128; return x;")); assertEquals((byte)(0 - 129), exec("byte x = 0; x -= 129; return x;")); - + // short assertEquals((short)(0 - -32768), exec("short x = 0; x -= -32768; return x;")); assertEquals((short)(0 - 32769), exec("short x = 0; x -= 32769; return x;")); - + // char assertEquals((char)(0 - -65536), exec("char x = 0; x -= -65536; return x;")); assertEquals((char)(0 - 65536), exec("char x = 0; x -= 65536; return x;")); - + // int assertEquals(1 - -2147483647, exec("int x = 1; x -= -2147483647; return x;")); assertEquals(-2 - 2147483647, exec("int x = -2; x -= 2147483647; return x;")); - + // long assertEquals(1L - -9223372036854775807L, exec("long x = 1; x -= -9223372036854775807L; return x;")); assertEquals(-2L - 9223372036854775807L, exec("long x = -2; x -= 9223372036854775807L; return x;")); } - + public void testAssignmentMultiplicationOverflow() { // byte assertEquals((byte) (2 * 128), exec("byte x = 2; x *= 128; return x;")); assertEquals((byte) (2 * -128), exec("byte x = 2; x *= -128; return x;")); - + // char assertEquals((char) (2 * 65536), exec("char x = 2; x *= 65536; return x;")); assertEquals((char) (2 * -65536), exec("char x = 2; x *= -65536; return x;")); - + // int assertEquals(2 * 2147483647, exec("int x = 2; x *= 2147483647; return x;")); assertEquals(2 * -2147483647, exec("int x = 2; x *= -2147483647; return x;")); - + // long assertEquals(2L * 9223372036854775807L, exec("long x = 2; x *= 9223372036854775807L; return x;")); assertEquals(2L * -9223372036854775807L, exec("long x = 2; x *= -9223372036854775807L; return x;")); } - + public void testAssignmentDivisionOverflow() { // byte assertEquals((byte) (-128 / -1), exec("byte x = (byte) -128; x /= -1; return x;")); // short assertEquals((short) (-32768 / -1), exec("short x = (short) -32768; x /= -1; return x;")); - + // cannot happen for char: unsigned - + // int assertEquals((-2147483647 - 1) / -1, exec("int x = -2147483647 - 1; x /= -1; return x;")); - + // long assertEquals((-9223372036854775807L - 1L) / -1L, exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;")); } - + public void testIncrementOverFlow() throws Exception { // byte assertEquals((byte) 128, exec("byte x = 127; ++x; return x;")); assertEquals((byte) 128, exec("byte x = 127; x++; return x;")); assertEquals((byte) -129, exec("byte x = (byte) -128; --x; return x;")); assertEquals((byte) -129, exec("byte x = (byte) -128; x--; return x;")); - + // short assertEquals((short) 32768, exec("short x = 32767; ++x; return x;")); assertEquals((short) 32768, exec("short x = 32767; x++; return x;")); assertEquals((short) -32769, exec("short x = (short) -32768; --x; return x;")); assertEquals((short) -32769, exec("short x = (short) -32768; x--; return x;")); - + // char 
assertEquals((char) 65536, exec("char x = 65535; ++x; return x;")); assertEquals((char) 65536, exec("char x = 65535; x++; return x;")); assertEquals((char) -1, exec("char x = (char) 0; --x; return x;")); assertEquals((char) -1, exec("char x = (char) 0; x--; return x;")); - + // int - assertEquals(2147483647 + 1, exec("int x = 2147483647; ++x; return x;")); + assertEquals(2147483647 + 1, exec("int x = 2147483647; ++x; return x;")); assertEquals(2147483647 + 1, exec("int x = 2147483647; x++; return x;")); - assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; --x; return x;")); + assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; --x; return x;")); assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; x--; return x;")); - + // long - assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; ++x; return x;")); + assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; ++x; return x;")); assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; x++; return x;")); assertEquals(-9223372036854775807L - 1L - 1L, exec("long x = -9223372036854775807L - 1L; --x; return x;")); assertEquals(-9223372036854775807L - 1L - 1L, exec("long x = -9223372036854775807L - 1L; x--; return x;")); } - + public void testAddition() throws Exception { - assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;")); + assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;")); assertEquals(9223372036854775807L + 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;")); } - + public void testAdditionConst() throws Exception { - assertEquals(2147483647 + 2147483647, exec("return 2147483647 + 2147483647;")); + assertEquals(2147483647 + 2147483647, exec("return 2147483647 + 2147483647;")); assertEquals(9223372036854775807L + 9223372036854775807L, exec("return 9223372036854775807L + 9223372036854775807L;")); } - + public void testSubtraction() throws Exception { - assertEquals(-10 - 2147483647, exec("int x = -10; int y = 2147483647; return x - y;")); + assertEquals(-10 - 2147483647, exec("int x = -10; int y = 2147483647; return x - y;")); assertEquals(-10L - 9223372036854775807L, exec("long x = -10L; long y = 9223372036854775807L; return x - y;")); } - + public void testSubtractionConst() throws Exception { - assertEquals(-10 - 2147483647, exec("return -10 - 2147483647;")); + assertEquals(-10 - 2147483647, exec("return -10 - 2147483647;")); assertEquals(-10L - 9223372036854775807L, exec("return -10L - 9223372036854775807L;")); } - + public void testMultiplication() throws Exception { - assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;")); + assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;")); assertEquals(9223372036854775807L * 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;")); } - + public void testMultiplicationConst() throws Exception { assertEquals(2147483647 * 2147483647, exec("return 2147483647 * 2147483647;")); assertEquals(9223372036854775807L * 9223372036854775807L, exec("return 9223372036854775807L * 9223372036854775807L;")); @@ -175,17 +175,17 @@ public class IntegerOverflowEnabledTests extends ScriptTestCase { assertEquals((-2147483647 - 1) / -1, exec("int x = -2147483648; int y = -1; return x / y;")); assertEquals((-9223372036854775807L - 1L) 
/ -1L, exec("long x = -9223372036854775808L; long y = -1L; return x / y;")); } - + public void testDivisionConst() throws Exception { assertEquals((-2147483647 - 1) / -1, exec("return (-2147483648) / -1;")); assertEquals((-9223372036854775807L - 1L) / -1L, exec("return (-9223372036854775808L) / -1L;")); } - + public void testNegationOverflow() throws Exception { assertEquals(-(-2147483647 - 1), exec("int x = -2147483648; x = -x; return x;")); assertEquals(-(-9223372036854775807L - 1L), exec("long x = -9223372036854775808L; x = -x; return x;")); } - + public void testNegationOverflowConst() throws Exception { assertEquals(-(-2147483647 - 1), exec("int x = -(-2147483648); return x;")); assertEquals(-(-9223372036854775807L - 1L), exec("long x = -(-9223372036854775808L); return x;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java index c2c19ccb03a..5e0b0035ceb 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java @@ -21,22 +21,15 @@ package org.elasticsearch.plan.a; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; /** Runs yaml rest tests */ public class PlanARestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(PlanAPlugin.class); - } - public PlanARestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java index e5084392f99..8f2991c3d0c 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java @@ -35,6 +35,7 @@ public class ScriptEngineTests extends ScriptTestCase { assertEquals(3, ((Number)value).intValue()); } + @SuppressWarnings("unchecked") // We know its Map because we put them there in the test public void testMapAccess() { Map vars = new HashMap<>(); Map obj2 = new HashMap<>(); @@ -54,6 +55,7 @@ public class ScriptEngineTests extends ScriptTestCase { assertEquals("2", value); } + @SuppressWarnings("unchecked") // We know its Map because we put them there ourselves public void testAccessListInScript() { Map vars = new HashMap<>(); Map obj2 = new HashMap<>(); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java index 5b4948036f3..8ff87bd2a0d 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java @@ -48,7 +48,7 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, 
Boolean.toString(random().nextBoolean()))); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java index 277778e7e76..e4dbce83122 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.plan.a; +import java.text.ParseException; import java.util.Collections; public class WhenThingsGoWrongTests extends ScriptTestCase { @@ -40,7 +41,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { fail("should have hit cce"); } catch (ClassCastException expected) {} } - + public void testBogusParameter() { try { exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue")); @@ -49,4 +50,83 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); } } + + public void testInfiniteLoops() { + try { + exec("boolean x = true; while (x) {}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("while (true) {int y = 5}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("while (true) { boolean x = true; while (x) {} }"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("while (true) { boolean x = false; while (x) {} }"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("boolean x = true; for (;x;) {}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("for (;;) {int x = 5}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("def x = true; do {int y = 5;} while (x)"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("try { int x } catch (PlanAError error) {}"); + fail("should have hit ParseException"); + } catch (RuntimeException expected) { + assertTrue(expected.getMessage().contains( + "unexpected token ['PlanAError'] was expecting one of [TYPE].")); + } + + } + + public void testLoopLimits() { + exec("for 
(int x = 0; x < 9999; ++x) {}"); + + try { + exec("for (int x = 0; x < 10000; ++x) {}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + } } diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java index d7d0ca662c8..ee0a707644f 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.script.python; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.python.PythonPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class LangPythonScriptRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(PythonPlugin.class); - } - public LangPythonScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index 70741c2094d..340832af6f8 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -61,8 +61,6 @@ dependencies { compile 'org.apache.commons:commons-compress:1.10' } -compileJava.options.compilerArgs << '-Xlint:-cast,-rawtypes' - forbiddenPatterns { exclude '**/*.docx' exclude '**/*.pdf' diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 7c54e6f17ce..082adb958ec 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -27,6 +27,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -70,6 +71,9 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; public class AttachmentMapper extends FieldMapper { private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment"); + public static final Setting INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING = Setting.boolSetting("index.mapping.attachment.ignore_errors", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING = Setting.boolSetting("index.mapping.attachment.detect_language", false, false, Setting.Scope.INDEX); + public static final Setting INDEX_ATTACHMENT_INDEXED_CHARS_SETTING = 
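/*
 * A sketch, not part of the patch: the testInfiniteLoops and testLoopLimits cases in
 * WhenThingsGoWrongTests above exercise a per-loop statement budget. Conceptually the
 * compiled Plan A script behaves like the hand-written guard below; the counter name and
 * injection point are hypothetical (the real check is injected by the compiler), and the
 * 10000 limit is inferred from the 9999-passes / 10000-fails pair of tests.
 *
 *     int statementsExecuted = 0;
 *     while (condition) {
 *         if (++statementsExecuted > 10000) {
 *             throw new PlanAError("The maximum number of statements that can be executed "
 *                     + "in a loop has been reached.");
 *         }
 *         // ... original loop body ...
 *     }
 */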
Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, false, Setting.Scope.INDEX); public static final String CONTENT_TYPE = "attachment"; @@ -124,23 +128,23 @@ public class AttachmentMapper extends FieldMapper { private Boolean langDetect = null; - private Mapper.Builder contentBuilder; + private Mapper.Builder contentBuilder; - private Mapper.Builder titleBuilder = stringField(FieldNames.TITLE); + private Mapper.Builder titleBuilder = stringField(FieldNames.TITLE); - private Mapper.Builder nameBuilder = stringField(FieldNames.NAME); + private Mapper.Builder nameBuilder = stringField(FieldNames.NAME); - private Mapper.Builder authorBuilder = stringField(FieldNames.AUTHOR); + private Mapper.Builder authorBuilder = stringField(FieldNames.AUTHOR); - private Mapper.Builder keywordsBuilder = stringField(FieldNames.KEYWORDS); + private Mapper.Builder keywordsBuilder = stringField(FieldNames.KEYWORDS); - private Mapper.Builder dateBuilder = dateField(FieldNames.DATE); + private Mapper.Builder dateBuilder = dateField(FieldNames.DATE); - private Mapper.Builder contentTypeBuilder = stringField(FieldNames.CONTENT_TYPE); + private Mapper.Builder contentTypeBuilder = stringField(FieldNames.CONTENT_TYPE); - private Mapper.Builder contentLengthBuilder = integerField(FieldNames.CONTENT_LENGTH); + private Mapper.Builder contentLengthBuilder = integerField(FieldNames.CONTENT_LENGTH); - private Mapper.Builder languageBuilder = stringField(FieldNames.LANGUAGE); + private Mapper.Builder languageBuilder = stringField(FieldNames.LANGUAGE); public Builder(String name) { super(name, new AttachmentFieldType(), new AttachmentFieldType()); @@ -148,47 +152,47 @@ public class AttachmentMapper extends FieldMapper { this.contentBuilder = stringField(FieldNames.CONTENT); } - public Builder content(Mapper.Builder content) { + public Builder content(Mapper.Builder content) { this.contentBuilder = content; return this; } - public Builder date(Mapper.Builder date) { + public Builder date(Mapper.Builder date) { this.dateBuilder = date; return this; } - public Builder author(Mapper.Builder author) { + public Builder author(Mapper.Builder author) { this.authorBuilder = author; return this; } - public Builder title(Mapper.Builder title) { + public Builder title(Mapper.Builder title) { this.titleBuilder = title; return this; } - public Builder name(Mapper.Builder name) { + public Builder name(Mapper.Builder name) { this.nameBuilder = name; return this; } - public Builder keywords(Mapper.Builder keywords) { + public Builder keywords(Mapper.Builder keywords) { this.keywordsBuilder = keywords; return this; } - public Builder contentType(Mapper.Builder contentType) { + public Builder contentType(Mapper.Builder contentType) { this.contentTypeBuilder = contentType; return this; } - public Builder contentLength(Mapper.Builder contentType) { + public Builder contentLength(Mapper.Builder contentType) { this.contentLengthBuilder = contentType; return this; } - public Builder language(Mapper.Builder language) { + public Builder language(Mapper.Builder language) { this.languageBuilder = language; return this; } @@ -202,7 +206,7 @@ public class AttachmentMapper extends FieldMapper { if (contentBuilder instanceof FieldMapper.Builder == false) { throw new IllegalStateException("content field for attachment must be a field mapper"); } - ((FieldMapper.Builder)contentBuilder).indexName(name); + ((FieldMapper.Builder)contentBuilder).indexName(name); contentBuilder.name = name + "." 
+ FieldNames.CONTENT; contentMapper = (FieldMapper) contentBuilder.build(context); context.path().add(name); @@ -222,21 +226,21 @@ public class AttachmentMapper extends FieldMapper { context.path().remove(); if (defaultIndexedChars == null && context.indexSettings() != null) { - defaultIndexedChars = context.indexSettings().getAsInt("index.mapping.attachment.indexed_chars", 100000); + defaultIndexedChars = INDEX_ATTACHMENT_INDEXED_CHARS_SETTING.get(context.indexSettings()); } if (defaultIndexedChars == null) { defaultIndexedChars = 100000; } if (ignoreErrors == null && context.indexSettings() != null) { - ignoreErrors = context.indexSettings().getAsBoolean("index.mapping.attachment.ignore_errors", Boolean.TRUE); + ignoreErrors = INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING.get(context.indexSettings()); } if (ignoreErrors == null) { ignoreErrors = Boolean.TRUE; } if (langDetect == null && context.indexSettings() != null) { - langDetect = context.indexSettings().getAsBoolean("index.mapping.attachment.detect_language", Boolean.FALSE); + langDetect = INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING.get(context.indexSettings()); } if (langDetect == null) { langDetect = Boolean.FALSE; @@ -299,9 +303,9 @@ public class AttachmentMapper extends FieldMapper { return mapperBuilder; } - @SuppressWarnings({"unchecked"}) @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + @SuppressWarnings("unchecked") // Safe because we know how our maps are shaped + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { AttachmentMapper.Builder builder = new AttachmentMapper.Builder(name); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -316,7 +320,7 @@ public class AttachmentMapper extends FieldMapper { Map propNode = (Map) entry1.getValue(); Mapper.Builder mapperBuilder = findMapperBuilder(propNode, propName, parserContext); - if (parseMultiField((FieldMapper.Builder) mapperBuilder, fieldName, parserContext, propName, propNode)) { + if (parseMultiField((FieldMapper.Builder) mapperBuilder, fieldName, parserContext, propName, propNode)) { fieldsIterator.remove(); } else if (propName.equals(name) && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { builder.content(mapperBuilder); diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java index 9b640f98d16..e224607fff1 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.mapper.attachments; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.plugins.Plugin; @@ -34,6 +35,12 @@ public class MapperAttachmentsPlugin extends Plugin { return "Adds the attachment type allowing to parse difference attachment formats"; } + public void onModule(SettingsModule settingsModule) { + settingsModule.registerSetting(AttachmentMapper.INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING); + settingsModule.registerSetting(AttachmentMapper.INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING); + settingsModule.registerSetting(AttachmentMapper.INDEX_ATTACHMENT_INDEXED_CHARS_SETTING); + } 
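/*
 * A sketch, not part of the patch: the hunks above replace string-keyed lookups such as
 * context.indexSettings().getAsBoolean("index.mapping.attachment.ignore_errors", Boolean.TRUE)
 * with typed Setting constants that are registered with the SettingsModule. The minimal
 * pattern, using the same factory signature as this branch (key, default value, dynamic
 * flag, scope); the setting key below is hypothetical:
 *
 *     public static final Setting<Boolean> MY_FLAG_SETTING =
 *             Setting.boolSetting("index.my_plugin.flag", false, false, Setting.Scope.INDEX);
 *
 *     public void onModule(SettingsModule settingsModule) {
 *         settingsModule.registerSetting(MY_FLAG_SETTING); // makes the key known to validation
 *     }
 *
 *     boolean flag = MY_FLAG_SETTING.get(indexSettings);   // default applied automatically
 */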
+ public void onModule(IndicesModule indicesModule) { indicesModule.registerMapper("attachment", new AttachmentMapper.TypeParser()); } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java index 1eecda65a05..1eccb1a1445 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.mapper.attachments; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; @@ -30,14 +29,6 @@ import java.io.IOException; public class MapperAttachmentsRestIT extends ESRestTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("plugin.types", MapperAttachmentsPlugin.class.getName()) - .build(); - } - public MapperAttachmentsRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java index 97c5ad994a4..bbe342c716c 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.mapper.murmur3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MapperMurmur3RestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MapperMurmur3Plugin.class); - } - public MapperMurmur3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 1b54b8293a7..1da604c4184 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -33,14 +33,22 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + MapperRegistry mapperRegistry; IndexService indexService; DocumentMapperParser parser; diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java index 9899776f7dd..84df085f221 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.mapper.size; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.mapper.MapperSizePlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MapperSizeRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MapperSizePlugin.class); - } - public MapperSizeRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index 956dd29402b..a44dddda3ed 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -34,9 +34,12 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -52,6 +55,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase { MapperService mapperService; DocumentMapperParser parser; + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + @Before public void before() { indexService = createIndex("test"); @@ -139,12 +147,12 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); String disabledMapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 86f58207e7d..3cecd810a95 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -123,9 +123,10 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent> nodePlugins() { - return pluginList(AzureRepositoryPlugin.class, TestPlugin.class); + return pluginList(AzureRepositoryPlugin.class, TestPlugin.class, MockFSIndexStore.TestPlugin.class); } @Override @@ -93,8 +94,8 @@ public abstract class AbstractAzureRepositoryServiceTestCase extends AbstractAzu // During restore we frequently restore index to exactly the same state it was before, that might cause the same // checksum file to be written twice during restore operation return Settings.builder().put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING.getKey(), false) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java index 0f144a13075..93683d9d014 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java @@ -61,6 +61,16 @@ public class AzureStorageServiceTest extends ESTestCase { assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); } + public void testGetDefaultClientWithNoSecondary() { + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(Settings.builder() + .put("cloud.azure.storage.azure1.account", "myaccount1") + .put("cloud.azure.storage.azure1.key", "mykey1") + .build()); + azureStorageService.doStart(); + CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY); + assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); + } + public void testGetSelectedClientPrimary() { AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); azureStorageService.doStart(); @@ -82,6 +92,13 @@ public class AzureStorageServiceTest extends ESTestCase { assertThat(client.getEndpoint(), is(URI.create("https://azure3"))); } + public void testGetDefaultClientWithPrimaryAndSecondaries() { + AzureStorageServiceImpl azureStorageService = new 
AzureStorageServiceMock(settings); + azureStorageService.doStart(); + CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY); + assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); + } + public void testGetSelectedClientNonExisting() { AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); azureStorageService.doStart(); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java index d0267d51b0e..ad58838bf5b 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.repositories.azure; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.repository.azure.AzureRepositoryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AzureRepositoryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AzureRepositoryPlugin.class); - } - public AzureRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index cec7361de0a..cab2f98e940 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -86,8 +86,8 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa // During restore we frequently restore index to exactly the same state it was before, that might cause the same // checksum file to be written twice during restore operation return Settings.builder().put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING.getKey(), false) .build(); } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java index db423cdd44f..dea6e8b749f 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java @@ -19,24 +19,15 @@ package org.elasticsearch.repositories.hdfs; import java.io.IOException; -import java.util.Collection; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - 
-import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.hdfs.HdfsPlugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; public class HdfsRepositoryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(HdfsPlugin.class); - } - public HdfsRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 55f88fbfeea..151daaab2a8 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -59,8 +59,8 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase // During restore we frequently restore index to exactly the same state it was before, that might cause the same // checksum file to be written twice during restore operation return Settings.builder().put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING.getKey(), false) .put("cloud.enabled", true) .put("plugin.types", S3RepositoryPlugin.class.getName()) .put("repositories.s3.base_path", basePath) diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java index 8521780b1d8..d8e436b50bb 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.repositories.s3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class RepositoryS3RestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(S3RepositoryPlugin.class); - } - public RepositoryS3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java index 9d9bdc1d389..03a19d752c1 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java @@ -40,6 +40,6 @@ public class SmbMmapFsDirectoryService extends 
FsDirectoryService { @Override protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { logger.debug("wrapping MMapDirectory for SMB"); - return new SmbDirectoryWrapper(new MMapDirectory(location, buildLockFactory(indexSettings))); + return new SmbDirectoryWrapper(new MMapDirectory(location, indexSettings.getValue(INDEX_LOCK_FACTOR_SETTING))); } } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java index 649af173763..af1b0372995 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.store.smb.SMBStorePlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class SMBStoreRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(SMBStorePlugin.class); - } - public SMBStoreRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index cddea9fd774..227936beb42 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -67,11 +67,6 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER = "tests.cluster"; - /** - * Defaults to localhost:9300 - */ - public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300"; - protected static final ESLogger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName()); private static final AtomicInteger counter = new AtomicInteger(); @@ -131,11 +126,10 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { } @BeforeClass - public static void initializeSettings() throws UnknownHostException { + public static void initializeSettings() { clusterAddresses = System.getProperty(TESTS_CLUSTER); if (clusterAddresses == null || clusterAddresses.isEmpty()) { - clusterAddresses = TESTS_CLUSTER_DEFAULT; - logger.info("[{}] not set. 
Falling back to [{}]", TESTS_CLUSTER, TESTS_CLUSTER_DEFAULT); + fail("Must specify " + TESTS_CLUSTER + " for smoke client test"); } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json index 7f13d47fc06..baba4e3436a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json @@ -18,6 +18,11 @@ "timeout": { "type" : "time", "description" : "Explicit operation timeout" + }, + "include_defaults": { + "type": "boolean", + "description": "Whether to return all default clusters setting.", + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index 2c0c59f6898..ed96200687c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -44,6 +44,11 @@ "type": "boolean", "description": "Whether to return version and creation date values in human-readable format.", "default": false + }, + "include_defaults": { + "type": "boolean", + "description": "Whether to return all default setting for each of the indices.", + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json index 1e35c272fa0..17a1cd03118 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json @@ -42,6 +42,11 @@ "type": "boolean", "description": "Whether to return version and creation date values in human-readable format.", "default": false + }, + "include_defaults": { + "type": "boolean", + "description": "Whether to return all default setting for each of the indices.", + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yaml new file mode 100644 index 00000000000..6c93dabeec7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yaml @@ -0,0 +1,33 @@ +--- +setup: + - do: + indices.create: + body: + settings: + index: + refresh_interval: 10s + index: test-index +--- +Test reset index settings: + - do: + indices.get_settings: + flat_settings: true + index: test-index + - match: + test-index.settings.index\.refresh_interval: "10s" + - do: + indices.put_settings: + body: + refresh_interval: null + - do: + indices.get_settings: + flat_settings: false + - is_false: + test-index.settings.index\.refresh_interval + - do: + indices.get_settings: + include_defaults: true + flat_settings: true + index: test-index + - match: + test-index.defaults.index\.refresh_interval: "1s" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml index 78da54ef8fd..a1367f8c7d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml @@ -5,7 +5,7 @@ body: settings: index: - translog.disable_flush: true + 
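# A sketch, not part of the patch: how the include_defaults flag added above can be used
# from a yaml rest test. This mirrors 11_reset.yaml; the index name is hypothetical.
#
#   - do:
#       indices.get_settings:
#         include_defaults: true
#         flat_settings: true
#         index: my-index
#   - match:
#       my-index.defaults.index\.refresh_interval: "1s"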
translog.flush_threshold_size: "512MB" number_of_shards: 1 number_of_replicas: 0 refresh_interval: -1 diff --git a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java index db63d137f32..3c72701f214 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java @@ -22,8 +22,10 @@ import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.engine.MockEngineFactory; +import org.elasticsearch.test.engine.MockEngineSupport; import java.util.Collection; import java.util.Collections; @@ -41,6 +43,11 @@ public class MockEngineFactoryPlugin extends Plugin { private Class readerWrapper = AssertingDirectoryReader.class; + public void onModule(SettingsModule module) { + module.registerSetting(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE); + module.registerSetting(MockEngineSupport.WRAP_READER_RATIO); + } + @Override public void onIndexModule(IndexModule module) { module.engineFactory.set(new MockEngineFactory(readerWrapper)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java new file mode 100644 index 00000000000..a51c3f9eb40 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +import static java.util.Collections.unmodifiableList; + +/** + * Plugin that registers a filter that records actions. + */ +public class ActionRecordingPlugin extends Plugin { + /** + * Fetch all the requests recorded by the test plugin. 
The list is an + * immutable, moment-in-time snapshot. + */ + public static List<ActionRequest<?>> allRequests() { + List<ActionRequest<?>> requests = new ArrayList<>(); + for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { + requests.addAll(filter.requests); + } + return unmodifiableList(requests); + } + + /** + * Fetch all requests recorded by the test plugin of a certain type. The + * list is an immutable, moment-in-time snapshot. + */ + public static <T extends ActionRequest> List<T> requestsOfType(Class<T> type) { + List<T> requests = new ArrayList<>(); + for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { + for (ActionRequest request : filter.requests) { + if (type.isInstance(request)) { + requests.add(type.cast(request)); + } + } + } + return unmodifiableList(requests); + } + + /** + * Clear all the recorded requests. Use between test methods that share a + * suite-scoped cluster. + */ + public static void clear() { + for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { + filter.requests.clear(); + } + } + + @Override + public String name() { + return "test-action-logging"; + } + + @Override + public String description() { + return "Test action logging"; + } + + @Override + public Collection<Module> nodeModules() { + return Collections.singletonList(new ActionRecordingModule()); + } + + public void onModule(ActionModule module) { + module.registerFilter(RecordingFilter.class); + } + + public static class ActionRecordingModule extends AbstractModule { + @Override + protected void configure() { + bind(RecordingFilter.class).asEagerSingleton(); + } + + } + + public static class RecordingFilter extends ActionFilter.Simple { + private final List<ActionRequest<?>> requests = new CopyOnWriteArrayList<>(); + + @Inject + public RecordingFilter(Settings settings) { + super(settings); + } + + public List<ActionRequest<?>> getRequests() { + return new ArrayList<>(requests); + } + + @Override + public int order() { + return 999; + } + + @Override + protected boolean apply(String action, ActionRequest request, ActionListener listener) { + requests.add(request); + return true; + } + + @Override + protected boolean apply(String action, ActionResponse response, ActionListener listener) { + return true; + } + } +}
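Usage sketch (reviewer note, not part of the change): a suite-scoped integration test could drive the recorder as below; RefreshRequest is just an illustrative action type:

    import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.test.ActionRecordingPlugin;
    import org.elasticsearch.test.ESIntegTestCase;

    import java.util.Collection;

    public class ActionRecordingExampleIT extends ESIntegTestCase {
        @Override
        protected Collection<Class<? extends Plugin>> nodePlugins() {
            // install the recording filter on every node of the test cluster
            return pluginList(ActionRecordingPlugin.class);
        }

        public void testRefreshIsRecorded() {
            ActionRecordingPlugin.clear(); // drop requests recorded by earlier methods
            createIndex("test");
            client().admin().indices().prepareRefresh("test").get();
            // the filter saw the request on its way to the transport action
            assertFalse(ActionRecordingPlugin.requestsOfType(RefreshRequest.class).isEmpty());
        }
    }

diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 3124fc9f8bc..f1f757fa08f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -75,8 +75,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -159,6 +162,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BooleanSupplier; +import java.util.function.Function; import static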
org.elasticsearch.client.Requests.syncedFlushRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; @@ -266,7 +270,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * The value of this seed can be used to initialize a random context for a specific index. * It's set once per test via a generic index template. */ - public static final String SETTING_INDEX_SEED = "index.tests.seed"; + public static final Setting INDEX_TEST_SEED_SETTING = Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, false, Setting.Scope.INDEX); /** * A boolean value to enable or disable mock modules. This is useful to test the @@ -365,8 +369,11 @@ public abstract class ESIntegTestCase extends ESTestCase { // TODO move settings for random directory etc here into the index based randomized settings. if (cluster().size() > 0) { Settings.Builder randomSettingsBuilder = - setRandomIndexSettings(getRandom(), Settings.builder()) - .put(SETTING_INDEX_SEED, getRandom().nextLong()); + setRandomIndexSettings(getRandom(), Settings.builder()); + if (isInternalCluster()) { + // this is only used by mock plugins and if the cluster is not internal we just can't set it + randomSettingsBuilder.put(INDEX_TEST_SEED_SETTING.getKey(), getRandom().nextLong()); + } randomSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards()) .put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas()); @@ -465,20 +472,20 @@ public abstract class ESIntegTestCase extends ESTestCase { setRandomIndexNormsLoading(random, builder); if (random.nextBoolean()) { - builder.put(MergeSchedulerConfig.AUTO_THROTTLE, false); + builder.put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), false); } if (random.nextBoolean()) { - builder.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, random.nextBoolean()); + builder.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), random.nextBoolean()); } if (random.nextBoolean()) { - builder.put("index.shard.check_on_startup", randomFrom(random, "false", "checksum", "true")); + builder.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "checksum", "true")); } if (randomBoolean()) { // keep this low so we don't stall tests - builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, RandomInts.randomIntBetween(random, 1, 15) + "ms"); + builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), RandomInts.randomIntBetween(random, 1, 15) + "ms"); } return builder; @@ -486,15 +493,15 @@ public abstract class ESIntegTestCase extends ESTestCase { private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, + builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), random.nextBoolean() ? 
random.nextDouble() : random.nextBoolean()); } switch (random.nextInt(4)) { case 3: final int maxThreadCount = RandomInts.randomIntBetween(random, 1, 4); final int maxMergeCount = RandomInts.randomIntBetween(random, maxThreadCount, maxThreadCount + 4); - builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT, maxMergeCount); - builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT, maxThreadCount); + builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), maxMergeCount); + builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), maxThreadCount); break; } @@ -503,27 +510,27 @@ public abstract class ESIntegTestCase extends ESTestCase { private static Settings.Builder setRandomIndexNormsLoading(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random, Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY))); + builder.put(SearchService.INDEX_NORMS_LOADING_SETTING.getKey(), RandomPicks.randomFrom(random, Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY))); } return builder; } private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); + builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); } if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush + builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durability.values())); + builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), RandomPicks.randomFrom(random, Translog.Durability.values())); } if (random.nextBoolean()) { if (rarely(random)) { - builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), 0); // 0 has special meaning to sync each op } else { - builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); } } @@ -1283,7 +1290,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected final void enableAllocation(String... indices) { client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put( - EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "all" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all" )).get(); } @@ -1292,7 +1299,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected final void disableAllocation(String... 
indices) { client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put( - EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "none" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none" )).get(); } @@ -1822,9 +1829,25 @@ public abstract class ESIntegTestCase extends ESTestCase { mocks.add(AssertingLocalTransport.TestPlugin.class); } } + mocks.add(TestSeedPlugin.class); return Collections.unmodifiableList(mocks); } + public static final class TestSeedPlugin extends Plugin { + @Override + public String name() { + return "test-seed-plugin"; + } + @Override + public String description() { + return "a test plugin that registers index.tests.seed as an index setting"; + } + public void onModule(SettingsModule module) { + module.registerSetting(INDEX_TEST_SEED_SETTING); + } + + } + /** * Returns the client ratio configured via */
diff --git a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index 39e1857f412..f4e08979e7c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -21,11 +21,16 @@ package org.elasticsearch.test; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import java.util.Collections; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; public class IndexSettingsModule extends AbstractModule { @@ -43,17 +48,21 @@ public class IndexSettingsModule extends AbstractModule { bind(IndexSettings.class).toInstance(newIndexSettings(index, settings)); } - public static IndexSettings newIndexSettings(String index, Settings settings) { - return newIndexSettings(new Index(index), settings); + public static IndexSettings newIndexSettings(String index, Settings settings, Setting<?>... setting) { + return newIndexSettings(new Index(index), settings, setting); } - public static IndexSettings newIndexSettings(Index index, Settings settings) { + public static IndexSettings newIndexSettings(Index index, Settings settings, Setting<?>...
setting) { Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(settings) - .build(); + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(settings) + .build(); IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build(); - return new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + Set<Setting<?>> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + if (setting.length > 0) { + settingSet.addAll(Arrays.asList(setting)); + } + return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet)); } }
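Reviewer sketch of the widened signature: unit tests can now register ad-hoc index settings when building an IndexSettings instance. MY_SETTING below is hypothetical, mirroring the Setting style introduced by this change:

    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.index.IndexSettings;
    import org.elasticsearch.test.IndexSettingsModule;

    public class NewIndexSettingsSketch {
        // hypothetical index-scoped setting; not defined anywhere in this change
        static final Setting<Boolean> MY_SETTING =
                Setting.boolSetting("index.my_feature.enabled", false, false, Setting.Scope.INDEX);

        public static void main(String[] args) {
            IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test",
                    Settings.builder().put(MY_SETTING.getKey(), true).build(), MY_SETTING);
            // the value is parsed and validated against the registered setting
            System.out.println(indexSettings.getValue(MY_SETTING));
        }
    }

diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java new file mode 100644 index 00000000000..64719f0f9de --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.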
+ */ +package org.elasticsearch.test; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.plugins.Plugin; + +public final class InternalSettingsPlugin extends Plugin { + @Override + public String name() { + return "internal-settings-plugin"; + } + + @Override + public String description() { + return "a plugin that allows setting values for internal settings which can't be set via the ordinary API without this plugin installed"; + } + + public static final Setting<Integer> VERSION_CREATED = Setting.intSetting("index.version.created", 0, false, Setting.Scope.INDEX); + public static final Setting<Boolean> MERGE_ENABLED = Setting.boolSetting("index.merge.enabled", true, false, Setting.Scope.INDEX); + public static final Setting<Long> INDEX_CREATION_DATE_SETTING = Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, false, Setting.Scope.INDEX); + + public void onModule(SettingsModule module) { + module.registerSetting(VERSION_CREATED); + module.registerSetting(MERGE_ENABLED); + module.registerSetting(INDEX_CREATION_DATE_SETTING); + } +}
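And a sketch of the plugin in use (index name and timestamp are illustrative): installing it lets a test pin a value that the cluster would otherwise reject as an unregistered index setting:

    import org.elasticsearch.cluster.metadata.IndexMetaData;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.test.ESIntegTestCase;
    import org.elasticsearch.test.InternalSettingsPlugin;

    import java.util.Collection;

    public class CreationDateExampleIT extends ESIntegTestCase {
        @Override
        protected Collection<Class<? extends Plugin>> nodePlugins() {
            // registers index.version.created, index.merge.enabled and index.creation_date
            return pluginList(InternalSettingsPlugin.class);
        }

        public void testPinnedCreationDate() {
            long pinned = 4242L; // illustrative timestamp
            prepareCreate("test")
                    .setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, pinned).build())
                    .get();
        }
    }

diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index c0946ccefca..2f02e46f58b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -425,11 +425,11 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(IndexModule.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE); + builder.put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), random.nextBoolean() ?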
IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE); } if (random.nextBoolean()) { - builder.put(IndexModule.QUERY_CACHE_EVERYTHING, random.nextBoolean()); + builder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), random.nextBoolean()); } if (random.nextBoolean()) { @@ -463,7 +463,7 @@ public final class InternalTestCluster extends TestCluster { } // always default delayed allocation to 0 to make sure we have tests are not delayed - builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0); + builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0); return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java index c02c1d8503f..13f533a583e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java @@ -21,7 +21,9 @@ package org.elasticsearch.test; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; @@ -58,6 +60,14 @@ public final class MockIndexEventListener { return "a mock index listener for testing only"; } + /** + * For tests to pass in to fail on listener invocation + */ + public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, false, Setting.Scope.INDEX); + public void onModule(SettingsModule module) { + module.registerSetting(INDEX_FAIL); + } + @Override public void onIndexModule(IndexModule module) { module.addIndexEventListener(listener); diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index e97178cfaab..fd99f0d453d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -39,7 +39,7 @@ public class NoOpClient extends AbstractClient { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(Action action, Request request, ActionListener listener) { listener.onResponse(null); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index 50d75eb3716..ddccfe88e38 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -95,7 +95,7 @@ public final class MockEngineSupport { shardId = config.getShardId(); filterCache = config.getQueryCache(); filterCachingPolicy = config.getQueryCachingPolicy(); - final long seed = settings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); + final long seed = config.getIndexSettings().getValue(ESIntegTestCase.INDEX_TEST_SEED_SETTING); Random random = new Random(seed); final double ratio = 
WRAP_READER_RATIO.get(settings); boolean wrapReader = random.nextDouble() < ratio; diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 969d59d885e..1c9bddb6b45 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.internal.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runner.notification.Failure; @@ -39,7 +40,6 @@ import static org.elasticsearch.test.ESIntegTestCase.TESTS_CLUSTER; import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_BLACKLIST; import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SPEC; import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SUITE; -import static org.elasticsearch.test.rest.ESRestTestCase.Rest; /** * A {@link RunListener} that emits to {@link System#err} a string with command @@ -82,7 +82,7 @@ public class ReproduceInfoPrinter extends RunListener { gradleMessageBuilder.appendAllOpts(failure.getDescription()); //Rest tests are a special case as they allow for additional parameters - if (failure.getDescription().getTestClass().isAnnotationPresent(Rest.class)) { + if (ESRestTestCase.class.isAssignableFrom(failure.getDescription().getTestClass())) { gradleMessageBuilder.appendRestTestsProperties(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index b4aecd52a14..5684717342d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -20,20 +20,12 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.carrotsearch.randomizedtesting.annotations.TestGroup; -import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.apache.lucene.util.LuceneTestCase.SuppressFsync; -import org.apache.lucene.util.TimeUnits; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.node.Node; -import org.elasticsearch.repositories.uri.URLRepository; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.client.RestException; import org.elasticsearch.test.rest.parser.RestTestParseException; import org.elasticsearch.test.rest.parser.RestTestSuiteParser; @@ -45,17 +37,14 @@ import org.elasticsearch.test.rest.section.TestSection; import org.elasticsearch.test.rest.spec.RestApi; import org.elasticsearch.test.rest.spec.RestSpec; import org.elasticsearch.test.rest.support.FileUtils; +import org.junit.After; import org.junit.AfterClass; 
import org.junit.Before; import org.junit.BeforeClass; import java.io.IOException; import java.io.InputStream; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; +import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; @@ -67,6 +56,7 @@ import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -74,27 +64,7 @@ import java.util.Set; /** * Runs the clients test suite against an elasticsearch cluster. */ -@ESRestTestCase.Rest -@SuppressFsync // we aren't trying to test this here, and it can make the test slow -@SuppressCodecs("*") // requires custom completion postings format -@ClusterScope(randomDynamicTemplates = false) -@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // timeout the suite after 40min and fail the test. -public abstract class ESRestTestCase extends ESIntegTestCase { - - /** - * Property that allows to control whether the REST tests are run (default) or not - */ - public static final String TESTS_REST = "tests.rest"; - - /** - * Annotation for REST tests - */ - @Inherited - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.TYPE) - @TestGroup(enabled = true, sysProperty = ESRestTestCase.TESTS_REST) - public @interface Rest { - } +public abstract class ESRestTestCase extends ESTestCase { /** * Property that allows to control which REST tests get run. Supports comma separated list of tests */ @@ -132,7 +102,9 @@ public abstract class ESRestTestCase extends ESIntegTestCase { private static final String PATHS_SEPARATOR = "(?<!\\\\),"; private final List<BlacklistedPathPatternMatcher>
blacklistPathMatchers = new ArrayList<>(); + private final URL[] clusterUrls; private static RestTestExecutionContext restTestExecutionContext; + private static RestTestExecutionContext adminExecutionContext; private final RestTestCandidate testCandidate; @@ -142,6 +114,20 @@ public abstract class ESRestTestCase extends ESIntegTestCase { for (String entry : blacklist) { this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } + String cluster = System.getProperty("tests.rest.cluster"); + if (cluster == null) { + throw new RuntimeException("Must specify tests.rest.cluster for rest tests"); + } + String[] stringUrls = cluster.split(","); + clusterUrls = new URL[stringUrls.length]; + int i = 0; + try { + for (String stringUrl : stringUrls) { + clusterUrls[i++] = new URL("http://" + stringUrl); + } + } catch (IOException e) { + throw new RuntimeException("Failed to parse cluster addresses for rest test", e); + } } @Override @@ -150,28 +136,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { super.afterIfFailed(errors); } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .putArray(URLRepository.ALLOWED_URLS_SETTING, "http://snapshot.test*") - .put(Node.HTTP_ENABLED, true) - .put("node.testattr", "test") - .put(super.nodeSettings(nodeOrdinal)).build(); - } - public static Iterable createParameters(int id, int count) throws IOException, RestTestParseException { - TestGroup testGroup = Rest.class.getAnnotation(TestGroup.class); - String sysProperty = TestGroup.Utilities.getSysProperty(Rest.class); - boolean enabled; - try { - enabled = RandomizedTest.systemPropertyAsBoolean(sysProperty, testGroup.enabled()); - } catch (IllegalArgumentException e) { - // Ignore malformed system property, disable the group if malformed though. 
- enabled = false; - } - if (!enabled) { - return new ArrayList<>(); - } //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system List<RestTestCandidate> restTestCandidates = collectTestCandidates(id, count); List<Object[]> objects = new ArrayList<>(); @@ -274,6 +239,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { } validateSpec(restSpec); restTestExecutionContext = new RestTestExecutionContext(restSpec); + adminExecutionContext = new RestTestExecutionContext(restSpec); } private static void validateSpec(RestSpec restSpec) { @@ -293,27 +259,42 @@ public abstract class ESRestTestCase extends ESIntegTestCase { } } + @After + public void wipeCluster() throws Exception { + + // wipe indices + Map<String, String> deleteIndicesArgs = new HashMap<>(); + deleteIndicesArgs.put("index", "*"); + try { + adminExecutionContext.callApi("indices.delete", deleteIndicesArgs, Collections.emptyList(), Collections.emptyMap()); + } catch (RestException e) { + // 404 here just means we had no indexes + if (e.statusCode() != 404) { + throw e; + } + } + + // wipe index templates + Map<String, String> deleteTemplatesArgs = new HashMap<>(); + deleteTemplatesArgs.put("name", "*"); + adminExecutionContext.callApi("indices.delete_template", deleteTemplatesArgs, Collections.emptyList(), Collections.emptyMap()); + + // wipe snapshot repositories + Map<String, String> deleteSnapshotsArgs = new HashMap<>(); + deleteSnapshotsArgs.put("repository", "*"); + adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap()); + } + @AfterClass public static void close() { if (restTestExecutionContext != null) { restTestExecutionContext.close(); + adminExecutionContext.close(); restTestExecutionContext = null; + adminExecutionContext = null; } } - @Override - protected int maximumNumberOfShards() { - return 3; // never go crazy in the REST tests - } - - @Override - protected int maximumNumberOfReplicas() { - // hardcoded 1 since this is what clients also do and our tests must expect that we have only node - // with replicas set to 1 ie. the cluster won't be green - return 1; - - } - /** * Used to obtain settings for the REST client that is used to send REST requests. */ @@ -321,15 +302,29 @@ return Settings.EMPTY; } + /** Returns the REST client settings used for admin actions like cleaning up after the test has completed. */ + protected Settings restAdminSettings() { + return restClientSettings(); // default to the same client settings + } + + /** Returns the addresses the client uses to connect to the test cluster.
*/ + protected URL[] getClusterUrls() { + return clusterUrls; + } + @Before public void reset() throws IOException, RestException { + // admin context must be available for @After always, regardless of whether the test was blacklisted + adminExecutionContext.initClient(clusterUrls, restAdminSettings()); + adminExecutionContext.clear(); + //skip test if it matches one of the blacklist globs for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher.isSuffixMatch(testPath)); } //The client needs non static info to get initialized, therefore it can't be initialized in the before class - restTestExecutionContext.initClient(cluster().httpAddresses(), restClientSettings()); + restTestExecutionContext.initClient(clusterUrls, restClientSettings()); restTestExecutionContext.clear(); //skip test if the whole suite (yaml file) is disabled diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 4054b8efce1..83860b18bd9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.rest.spec.RestSpec; import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; +import java.net.URL; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -119,9 +120,9 @@ public class RestTestExecutionContext implements Closeable { /** * Creates the embedded REST client when needed. Needs to be called before each test. 
*/ - public void initClient(InetSocketAddress[] addresses, Settings settings) throws IOException, RestException { + public void initClient(URL[] urls, Settings settings) throws IOException, RestException { if (restClient == null) { - restClient = new RestClient(restSpec, settings, addresses); + restClient = new RestClient(restSpec, settings, urls); } }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index 63a8b397c45..2b6ded9a5f5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; @@ -48,6 +49,7 @@ import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.net.InetSocketAddress; +import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.security.KeyManagementException; @@ -80,18 +82,18 @@ public class RestClient implements Closeable { private final RestSpec restSpec; private final CloseableHttpClient httpClient; private final Headers headers; - private final InetSocketAddress[] addresses; + private final URL[] urls; private final Version esVersion; - public RestClient(RestSpec restSpec, Settings settings, InetSocketAddress[] addresses) throws IOException, RestException { - assert addresses.length > 0; + public RestClient(RestSpec restSpec, Settings settings, URL[] urls) throws IOException, RestException { + assert urls.length > 0; this.restSpec = restSpec; this.headers = new Headers(settings); this.protocol = settings.get(PROTOCOL, "http"); this.httpClient = createHttpClient(settings); - this.addresses = addresses; + this.urls = urls; this.esVersion = readAndCheckVersion(); - logger.info("REST client initialized {}, elasticsearch version: [{}]", addresses, esVersion); + logger.info("REST client initialized {}, elasticsearch version: [{}]", urls, esVersion); } private Version readAndCheckVersion() throws IOException, RestException { @@ -102,8 +104,8 @@ assert restApi.getMethods().size() == 1; String version = null; - for (InetSocketAddress address : addresses) { - RestResponse restResponse = new RestResponse(httpRequestBuilder(address) + for (URL url : urls) { + RestResponse restResponse = new RestResponse(httpRequestBuilder(url) .path(restApi.getPaths().get(0)) .method(restApi.getMethods().get(0)).execute()); checkStatusCode(restResponse); @@ -152,6 +154,8 @@ HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body); for (Map.Entry<String, String> header : headers.entrySet()) { + logger.debug("adding header [{}] with value [{}]", header.getKey(), header.getValue()); httpRequestBuilder.addHeader(header.getKey(), header.getValue()); } logger.debug("calling api [{}]", apiName); @@ -246,17 +250,18 @@ return restApi; } - protected HttpRequestBuilder httpRequestBuilder(InetSocketAddress address) { + protected
HttpRequestBuilder httpRequestBuilder(URL url) { return new HttpRequestBuilder(httpClient) .addHeaders(headers) .protocol(protocol) - .host(NetworkAddress.formatAddress(address.getAddress())).port(address.getPort()); + .host(url.getHost()) + .port(url.getPort()); } protected HttpRequestBuilder httpRequestBuilder() { //the address used is randomized between the available ones - InetSocketAddress address = RandomizedTest.randomFrom(addresses); - return httpRequestBuilder(address); + URL url = RandomizedTest.randomFrom(urls); + return httpRequestBuilder(url); } protected CloseableHttpClient createHttpClient(Settings settings) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java index 036310e4e47..b99bf3475da 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java @@ -136,7 +136,7 @@ public class RestTestSuiteParseContext { token = parser.nextToken(); } if (token != XContentParser.Token.FIELD_NAME) { - throw new RestTestParseException("malformed test section: field name expected but found " + token); + throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + parser.getTokenLocation()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java index e8422887ad4..ba1b99288e3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.test.rest.parser; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.TestSection; - import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; @@ -30,6 +25,11 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.yaml.YamlXContent; +import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.section.TestSection; + /** * Parser for a complete test suite (yaml file) */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 25c29f0c6c9..ef3be122cdb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -32,11 +32,13 @@ import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestRuleMarkFailure; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -57,13 +59,15 @@ import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.Random; +import java.util.Set; public class MockFSDirectoryService extends FsDirectoryService { - public static final String RANDOM_IO_EXCEPTION_RATE_ON_OPEN = "index.store.mock.random.io_exception_rate_on_open"; - public static final String RANDOM_PREVENT_DOUBLE_WRITE = "index.store.mock.random.prevent_double_write"; - public static final String RANDOM_NO_DELETE_OPEN_FILE = "index.store.mock.random.no_delete_open_file"; - public static final String CRASH_INDEX = "index.store.mock.random.crash_index"; + public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = Setting.boolSetting("index.store.mock.random.prevent_double_write", true, false, Setting.Scope.INDEX);// true is default in MDW + public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, false, Setting.Scope.INDEX);// true is default in MDW + public static final Setting CRASH_INDEX_SETTING = Setting.boolSetting("index.store.mock.random.crash_index", true, false, Setting.Scope.INDEX);// true is default in MDW private final FsDirectoryService delegateService; private final Random random; @@ -78,16 +82,16 @@ public class MockFSDirectoryService extends FsDirectoryService { public MockFSDirectoryService(IndexSettings idxSettings, IndexStore indexStore, final ShardPath path) { super(idxSettings, indexStore, path); Settings indexSettings = idxSettings.getSettings(); - final long seed = indexSettings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); + final long seed = idxSettings.getValue(ESIntegTestCase.INDEX_TEST_SEED_SETTING); this.random = new Random(seed); - randomIOExceptionRate = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE, 0.0d); - randomIOExceptionRateOnOpen = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0.0d); - preventDoubleWrite = indexSettings.getAsBoolean(RANDOM_PREVENT_DOUBLE_WRITE, true); // true is default in MDW - noDeleteOpenFile = indexSettings.getAsBoolean(RANDOM_NO_DELETE_OPEN_FILE, random.nextBoolean()); // true is default in MDW + randomIOExceptionRate = RANDOM_IO_EXCEPTION_RATE_SETTING.get(indexSettings); + randomIOExceptionRateOnOpen = RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.get(indexSettings); + preventDoubleWrite = RANDOM_PREVENT_DOUBLE_WRITE_SETTING.get(indexSettings); + noDeleteOpenFile = RANDOM_NO_DELETE_OPEN_FILE_SETTING.exists(indexSettings) ? 
RANDOM_NO_DELETE_OPEN_FILE_SETTING.get(indexSettings) : random.nextBoolean(); random.nextInt(shardId.getId() + 1); // some randomness per shard throttle = MockDirectoryWrapper.Throttling.NEVER; - crashIndex = indexSettings.getAsBoolean(CRASH_INDEX, true); + crashIndex = CRASH_INDEX_SETTING.get(indexSettings); if (logger.isDebugEnabled()) { logger.debug("Using MockDirWrapper with seed [{}] throttle: [{}] crashIndex: [{}]", SeedUtils.formatSeed(seed), @@ -161,8 +165,6 @@ public class MockFSDirectoryService extends FsDirectoryService { return delegateService.throttleTimeInNanos(); } - public static final String RANDOM_IO_EXCEPTION_RATE = "index.store.mock.random.io_exception_rate"; - private Directory wrap(Directory dir) { final ElasticsearchMockDirectoryWrapper w = new ElasticsearchMockDirectoryWrapper(random, dir, this.crashIndex); w.setRandomIOExceptionRate(randomIOExceptionRate); @@ -180,8 +182,8 @@ public class MockFSDirectoryService extends FsDirectoryService { private FsDirectoryService randomDirectorService(IndexStore indexStore, ShardPath path) { final IndexSettings indexSettings = indexStore.getIndexSettings(); - final IndexMetaData build = IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.STORE_TYPE, RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build(); - final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings(), Collections.emptyList()); + final IndexMetaData build = IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build(); + final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings()); return new FsDirectoryService(newIndexSettings, indexStore, path); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 3fe700701dd..80251d54951 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -22,7 +22,9 @@ package org.elasticsearch.test.store; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexEventListener; @@ -42,7 +44,7 @@ import java.util.Map; public class MockFSIndexStore extends IndexStore { - public static final String CHECK_INDEX_ON_CLOSE = "index.store.mock.check_index_on_close"; + public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = Setting.boolSetting("index.store.mock.check_index_on_close", true, false, Setting.Scope.INDEX); public static class TestPlugin extends Plugin { @Override @@ -55,14 +57,24 @@ public class MockFSIndexStore extends IndexStore { } @Override public Settings additionalSettings() { - return Settings.builder().put(IndexModule.STORE_TYPE, "mock").build(); + return Settings.builder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), 
"mock").build(); + } + + public void onModule(SettingsModule module) { + + module.registerSetting(INDEX_CHECK_INDEX_ON_CLOSE_SETTING); + module.registerSetting(MockFSDirectoryService.CRASH_INDEX_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING); } @Override public void onIndexModule(IndexModule indexModule) { Settings indexSettings = indexModule.getSettings(); - if ("mock".equals(indexSettings.get(IndexModule.STORE_TYPE))) { - if (indexSettings.getAsBoolean(CHECK_INDEX_ON_CLOSE, true)) { + if ("mock".equals(indexSettings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey()))) { + if (INDEX_CHECK_INDEX_ON_CLOSE_SETTING.get(indexSettings)) { indexModule.addIndexEventListener(new Listener()); } indexModule.addIndexStore("mock", MockFSIndexStore::new); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index 8b395003576..98996932dc7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -69,7 +69,7 @@ public class AssertingLocalTransport extends LocalTransport { @Inject public AssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) { super(settings, threadPool, version, namedWriteableRegistry); - final long seed = settings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); + final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); random = new Random(seed); minVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_0_18_0); maxVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MAX_VERSION_KEY, Version.CURRENT); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index eefdb996e65..48d79ef064b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -16,9 +16,11 @@ * specific language governing permissions and limitations * under the License. 
*/ + package org.elasticsearch.test.transport; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -26,6 +28,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -40,9 +43,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; /** A transport class that doesn't send anything but rather captures all requests for inspection from tests */ public class CapturingTransport implements Transport { + private TransportServiceAdapter adapter; static public class CapturedRequest { @@ -59,6 +65,7 @@ public class CapturingTransport implements Transport { } } + private ConcurrentMap<Long, Tuple<DiscoveryNode, String>> requests = new ConcurrentHashMap<>(); private BlockingQueue<CapturedRequest> capturedRequests = ConcurrentCollections.newBlockingQueue(); /** returns all requests captured so far. Doesn't clear the captured request list. See {@link #clear()} */ @@ -120,14 +127,50 @@ public class CapturingTransport implements Transport { adapter.onResponseReceived(requestId).handleResponse(response); } - /** simulate a remote error for the given requesTId */ - public void handleResponse(final long requestId, final Throwable t) { - adapter.onResponseReceived(requestId).handleException(new RemoteTransportException("remote failure", t)); + /** + * simulate a local error for the given requestId, will be wrapped + * by a {@link SendRequestTransportException} + * + * @param requestId the id corresponding to the captured send + * request + * @param t the failure to wrap + */ + public void handleLocalError(final long requestId, final Throwable t) { + Tuple<DiscoveryNode, String> request = requests.get(requestId); + assert request != null; + this.handleError(requestId, new SendRequestTransportException(request.v1(), request.v2(), t)); } + /** + * simulate a remote error for the given requestId, will be wrapped + * by a {@link RemoteTransportException} + * + * @param requestId the id corresponding to the captured send + * request + * @param t the failure to wrap + */ + public void handleRemoteError(final long requestId, final Throwable t) { + this.handleError(requestId, new RemoteTransportException("remote failure", t)); + } + + /** + * simulate an error for the given requestId, unlike + * {@link #handleLocalError(long, Throwable)} and + * {@link #handleRemoteError(long, Throwable)}, the provided + * exception will not be wrapped but will be delivered to the + * transport layer as is + * + * @param requestId the id corresponding to the captured send + * request + * @param e the failure + */ + public void handleError(final long requestId, final TransportException e) { + adapter.onResponseReceived(requestId).handleException(e); + } @Override public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws
IOException, TransportException { + requests.put(requestId, Tuple.tuple(node, action)); capturedRequests.add(new CapturedRequest(node, requestId, action, request)); } @@ -149,7 +192,6 @@ @Override public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws Exception { - // WTF return new TransportAddress[0]; } @@ -217,4 +259,5 @@ public List<String> getLocalAddresses() { return Collections.emptyList(); } + }
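Finally, a sketch of how a unit test might drive the new error hooks; the capturedRequests() accessor name and the public requestId field are assumptions inferred from the class's javadoc, not shown in this hunk:

    import org.elasticsearch.ElasticsearchException;
    import org.elasticsearch.test.transport.CapturingTransport;

    public class CapturingTransportSketch {
        // simulate a remote failure for every request captured so far
        static void failAllCaptured(CapturingTransport transport) {
            for (CapturingTransport.CapturedRequest captured : transport.capturedRequests()) {
                // the handler registered for this request id receives a
                // RemoteTransportException wrapping the simulated cause
                transport.handleRemoteError(captured.requestId, new ElasticsearchException("simulated"));
            }
            transport.clear(); // reset the captured list before the next interaction
        }
    }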