diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 3cf1ccce824..ce2ecaff8cf 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -372,6 +372,7 @@ class BuildPlugin implements Plugin { systemProperty 'tests.artifact', project.name systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' + systemProperty 'jna.nosys', 'true' // default test sysprop values systemProperty 'tests.ifNoTests', 'fail' systemProperty 'es.logger.level', 'WARN' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 042e8d22529..67d0e167b8a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -112,9 +112,6 @@ public class PluginBuildPlugin extends BuildPlugin { include 'config/**' include 'bin/**' } - from('src/site') { - include '_site/**' - } } project.assemble.dependsOn(bundle) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy index dd5bcaedb0b..7b949b3e1da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy @@ -36,15 +36,9 @@ class PluginPropertiesExtension { @Input String description - @Input - boolean jvm = true - @Input String classname - @Input - boolean site = false - @Input boolean isolated = true diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 51853f85e00..de3d060ff26 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -51,11 +51,11 @@ class PluginPropertiesTask extends Copy { if (extension.description == null) { throw new InvalidUserDataException('description is a required setting for esplugin') } - if (extension.jvm && extension.classname == null) { - throw new InvalidUserDataException('classname is a required setting for esplugin with jvm=true') + if (extension.classname == null) { + throw new InvalidUserDataException('classname is a required setting for esplugin') } doFirst { - if (extension.jvm && extension.isolated == false) { + if (extension.isolated == false) { String warning = "WARNING: Disabling plugin isolation in ${project.path} is deprecated and will be removed in the future" logger.warn("${'=' * warning.length()}\n${warning}\n${'=' * warning.length()}") } @@ -74,10 +74,8 @@ class PluginPropertiesTask extends Copy { 'version': extension.version, 'elasticsearchVersion': VersionProperties.elasticsearch, 'javaVersion': project.targetCompatibility as String, - 'jvm': extension.jvm as String, - 'site': extension.site as String, 'isolated': extension.isolated as String, - 'classname': extension.jvm ? 
extension.classname : 'NA' + 'classname': extension.classname ] } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 2741019b751..c9db5657ba4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -57,12 +57,10 @@ class ClusterConfiguration { @Input Closure waitCondition = { NodeInfo node, AntBuilder ant -> File tmpFile = new File(node.cwd, 'wait.success') - ant.echo(message: "[${LocalDateTime.now()}] Waiting for elasticsearch node ${node.httpUri()}", level: "info") - ant.get(src: "http://${node.httpUri()}", + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}", dest: tmpFile.toString(), ignoreerrors: true, // do not fail on error, so logging buffers can be flushed by the wait task retries: 10) - ant.echo(message: "[${LocalDateTime.now()}] Finished waiting for elasticsearch node ${node.httpUri()}. Reachable? ${tmpFile.exists()}", level: "info") return tmpFile.exists() } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 4b7c05ec2eb..0d116a32712 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -46,9 +46,9 @@ class ClusterFormationTasks { /** * Adds dependent tasks to the given task to start and stop a cluster with the given configuration. * - * Returns an object that will resolve at execution time of the given task to a uri for the cluster. + * Returns a NodeInfo object for the first node in the cluster. 
*/ - static Object setup(Project project, Task task, ClusterConfiguration config) { + static NodeInfo setup(Project project, Task task, ClusterConfiguration config) { if (task.getEnabled() == false) { // no need to add cluster formation tasks if the task won't run! return @@ -66,7 +66,7 @@ class ClusterFormationTasks { task.dependsOn(wait) // delay the resolution of the uri by wrapping in a closure, so it is not used until read for tests - return "${-> nodes[0].transportUri()}" + return nodes[0] } /** Adds a dependency on the given distribution */ diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index b369d35c03a..b41b1822000 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -129,7 +129,7 @@ class NodeInfo { 'JAVA_HOME' : project.javaHome, 'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc ] - args.add("-Des.tests.portsfile=true") + args.add("-Des.node.portsfile=true") args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" }) for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('es.')) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 5656be57b8f..3bfe9d61018 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -20,7 +20,6 @@ package org.elasticsearch.gradle.test import com.carrotsearch.gradle.junit4.RandomizedTestingTask import org.elasticsearch.gradle.BuildPlugin -import org.gradle.api.GradleException import org.gradle.api.Task import 
org.gradle.api.internal.tasks.options.Option import org.gradle.api.plugins.JavaBasePlugin @@ -61,8 +60,12 @@ public class RestIntegTestTask extends RandomizedTestingTask { // this must run after all projects have been configured, so we know any project // references can be accessed as a fully configured project.gradle.projectsEvaluated { - Object clusterUri = ClusterFormationTasks.setup(project, this, clusterConfig) - systemProperty('tests.cluster', clusterUri) + NodeInfo node = ClusterFormationTasks.setup(project, this, clusterConfig) + systemProperty('tests.rest.cluster', "${-> node.httpUri()}") + // TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin + // that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass + // both as separate sysprops + systemProperty('tests.cluster', "${-> node.transportUri()}") } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy index 0a2cc841282..fefd08fe4e5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy @@ -41,6 +41,7 @@ public class StandaloneTestPlugin implements Plugin { ] RandomizedTestingTask test = project.tasks.create(testOptions) test.configure(BuildPlugin.commonTestConfig(project)) + BuildPlugin.configureCompile(project) test.classpath = project.sourceSets.test.runtimeClasspath test.testClassesDir project.sourceSets.test.output.classesDir test.mustRunAfter(project.precommit) diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties index 4c676c26cad..e6a5f81882d 100644 --- a/buildSrc/src/main/resources/plugin-descriptor.properties +++ b/buildSrc/src/main/resources/plugin-descriptor.properties 
@@ -2,26 +2,13 @@ # This file must exist as 'plugin-descriptor.properties' at # the root directory of all plugins. # -# A plugin can be 'site', 'jvm', or both. -# -### example site plugin for "foo": -# -# foo.zip <-- zip file for the plugin, with this structure: -# _site/ <-- the contents that will be served -# plugin-descriptor.properties <-- example contents below: -# -# site=true -# description=My cool plugin -# version=1.0 -# -### example jvm plugin for "foo" +### example plugin for "foo" # # foo.zip <-- zip file for the plugin, with this structure: # .jar <-- classes, resources, dependencies # .jar <-- any number of jars # plugin-descriptor.properties <-- example contents below: # -# jvm=true # classname=foo.bar.BazPlugin # description=My cool plugin # version=2.0 @@ -38,21 +25,6 @@ version=${version} # # 'name': the plugin name name=${name} - -### mandatory elements for site plugins: -# -# 'site': set to true to indicate contents of the _site/ -# directory in the root of the plugin should be served. -site=${site} -# -### mandatory elements for jvm plugins : -# -# 'jvm': true if the 'classname' class should be loaded -# from jar files in the root directory of the plugin. -# Note that only jar files in the root directory are -# added to the classpath for the plugin! If you need -# other resources, package them into a resources jar. -jvm=${jvm} # # 'classname': the name of the class to load, fully-qualified. 
classname=${classname} diff --git a/buildSrc/version.properties b/buildSrc/version.properties index e073730fe12..c62fdf7d500 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 3.0.0-SNAPSHOT -lucene = 5.5.0-snapshot-1721183 +lucene = 5.5.0-snapshot-1725675 # optional dependencies spatial4j = 0.5 diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index 5f1a181fabb..67f256c6bd1 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -200,7 +200,7 @@ public class ActionModule extends AbstractModule { private final Map actions = new HashMap<>(); private final List> actionFilters = new ArrayList<>(); - static class ActionEntry { + static class ActionEntry, Response extends ActionResponse> { public final GenericAction action; public final Class> transportAction; public final Class[] supportTransportActions; @@ -229,7 +229,7 @@ public class ActionModule extends AbstractModule { * @param The request type. * @param The response type. */ - public void registerAction(GenericAction action, Class> transportAction, Class... supportTransportActions) { + public , Response extends ActionResponse> void registerAction(GenericAction action, Class> transportAction, Class... 
supportTransportActions) { actions.put(action.name(), new ActionEntry<>(action, transportAction, supportTransportActions)); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java index f5020a46b37..24e9cd05aad 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -22,14 +22,9 @@ package org.elasticsearch.action.admin.cluster.settings; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import static org.elasticsearch.cluster.ClusterState.builder; /** @@ -57,11 +52,11 @@ final class SettingsUpdater { boolean changed = false; Settings.Builder transientSettings = Settings.settingsBuilder(); transientSettings.put(currentState.metaData().transientSettings()); - changed |= apply(transientToApply, transientSettings, transientUpdates, "transient"); + changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient"); Settings.Builder persistentSettings = Settings.settingsBuilder(); persistentSettings.put(currentState.metaData().persistentSettings()); - changed |= apply(persistentToApply, persistentSettings, persistentUpdates, "persistent"); + changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent"); if (!changed) { return currentState; @@ -86,42 +81,5 @@ final class SettingsUpdater { return build; } - private boolean apply(Settings toApply, Settings.Builder 
target, Settings.Builder updates, String type) { - boolean changed = false; - final Set toRemove = new HashSet<>(); - Settings.Builder settingsBuilder = Settings.settingsBuilder(); - for (Map.Entry entry : toApply.getAsMap().entrySet()) { - if (entry.getValue() == null) { - toRemove.add(entry.getKey()); - } else if (clusterSettings.isLoggerSetting(entry.getKey()) || clusterSettings.hasDynamicSetting(entry.getKey())) { - settingsBuilder.put(entry.getKey(), entry.getValue()); - updates.put(entry.getKey(), entry.getValue()); - changed = true; - } else { - throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); - } - } - changed |= applyDeletes(toRemove, target); - target.put(settingsBuilder.build()); - return changed; - } - - private final boolean applyDeletes(Set deletes, Settings.Builder builder) { - boolean changed = false; - for (String entry : deletes) { - Set keysToRemove = new HashSet<>(); - Set keySet = builder.internalMap().keySet(); - for (String key : keySet) { - if (Regex.simpleMatch(entry, key)) { - keysToRemove.add(key); - } - } - for (String key : keysToRemove) { - builder.remove(key); - changed = true; - } - } - return changed; - } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index a76b714b31d..aaaf11e4534 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.replication.ReplicationRequest; +import 
org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -37,7 +37,7 @@ import java.util.List; /** * Refresh action. */ -public class TransportRefreshAction extends TransportBroadcastReplicationAction { +public class TransportRefreshAction extends TransportBroadcastReplicationAction { @Inject public TransportRefreshAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, @@ -53,8 +53,8 @@ public class TransportRefreshAction extends TransportBroadcastReplicationAction< } @Override - protected ReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) { - return new ReplicationRequest(request, shardId); + protected BasicReplicationRequest newShardRequest(RefreshRequest request, ShardId shardId) { + return new BasicReplicationRequest(request, shardId); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index c78977fb362..7c9979e7374 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.replication.ReplicationRequest; +import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.action.index.MappingUpdatedAction; @@ -41,7 +41,7 @@ import org.elasticsearch.transport.TransportService; /** * */ -public class TransportShardRefreshAction extends TransportReplicationAction { +public class TransportShardRefreshAction extends TransportReplicationAction { public static final String NAME = RefreshAction.NAME + "[s]"; @@ -51,7 +51,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction shardOperationOnPrimary(MetaData metaData, ReplicationRequest shardRequest) throws Throwable { + protected Tuple shardOperationOnPrimary(MetaData metaData, BasicReplicationRequest shardRequest) throws Throwable { IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id()); indexShard.refresh("api"); logger.trace("{} refresh request executed on primary", indexShard.shardId()); @@ -68,7 +68,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction { private final MetaDataIndexTemplateService indexTemplateService; + private final IndexScopedSettings indexScopedSettings; @Inject public TransportPutIndexTemplateAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, MetaDataIndexTemplateService indexTemplateService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings) { super(settings, PutIndexTemplateAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutIndexTemplateRequest::new); this.indexTemplateService = indexTemplateService; + this.indexScopedSettings = indexScopedSettings; } @Override @@ -69,11 +73,13 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction

scheduledFuture; private final AtomicLong executionIdGen = new AtomicLong(); @@ -250,24 +250,24 @@ public class BulkProcessor implements Closeable { * (for example, if no id is provided, one will be generated, or usage of the create flag). */ public BulkProcessor add(IndexRequest request) { - return add((ActionRequest) request); + return add((ActionRequest) request); } /** * Adds an {@link DeleteRequest} to the list of actions to execute. */ public BulkProcessor add(DeleteRequest request) { - return add((ActionRequest) request); + return add((ActionRequest) request); } /** * Adds either a delete or an index request. */ - public BulkProcessor add(ActionRequest request) { + public BulkProcessor add(ActionRequest request) { return add(request, null); } - public BulkProcessor add(ActionRequest request, @Nullable Object payload) { + public BulkProcessor add(ActionRequest request, @Nullable Object payload) { internalAdd(request, payload); return this; } @@ -282,7 +282,7 @@ public class BulkProcessor implements Closeable { } } - private synchronized void internalAdd(ActionRequest request, @Nullable Object payload) { + private synchronized void internalAdd(ActionRequest request, @Nullable Object payload) { ensureOpen(); bulkRequest.add(request, payload); executeIfNeeded(); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 02e0ea40d65..00260644892 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -56,7 +56,7 @@ public class BulkRequest extends ActionRequest implements Composite private static final int REQUEST_OVERHEAD = 50; - final List requests = new ArrayList<>(); + final List> requests = new ArrayList<>(); List payloads = null; protected TimeValue timeout = BulkShardRequest.DEFAULT_TIMEOUT; @@ -72,21 +72,21 @@ public class BulkRequest extends ActionRequest implements 
Composite * Creates a bulk request caused by some other request, which is provided as an * argument so that its headers and context can be copied to the new request */ - public BulkRequest(ActionRequest request) { + public BulkRequest(ActionRequest request) { super(request); } /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(ActionRequest... requests) { - for (ActionRequest request : requests) { + public BulkRequest add(ActionRequest... requests) { + for (ActionRequest request : requests) { add(request, null); } return this; } - public BulkRequest add(ActionRequest request) { + public BulkRequest add(ActionRequest request) { return add(request, null); } @@ -96,7 +96,7 @@ public class BulkRequest extends ActionRequest implements Composite * @param payload Optional payload * @return the current bulk request */ - public BulkRequest add(ActionRequest request, @Nullable Object payload) { + public BulkRequest add(ActionRequest request, @Nullable Object payload) { if (request instanceof IndexRequest) { add((IndexRequest) request, payload); } else if (request instanceof DeleteRequest) { @@ -112,8 +112,8 @@ public class BulkRequest extends ActionRequest implements Composite /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(Iterable requests) { - for (ActionRequest request : requests) { + public BulkRequest add(Iterable> requests) { + for (ActionRequest request : requests) { add(request); } return this; @@ -196,15 +196,14 @@ public class BulkRequest extends ActionRequest implements Composite /** * The list of requests in this bulk request. 
*/ - public List requests() { + public List> requests() { return this.requests; } @Override - @SuppressWarnings("unchecked") public List subRequests() { List indicesRequests = new ArrayList<>(); - for (ActionRequest request : requests) { + for (ActionRequest request : requests) { assert request instanceof IndicesRequest; indicesRequests.add((IndicesRequest) request); } @@ -486,7 +485,7 @@ public class BulkRequest extends ActionRequest implements Composite if (requests.isEmpty()) { validationException = addValidationError("no requests added", validationException); } - for (ActionRequest request : requests) { + for (ActionRequest request : requests) { // We first check if refresh has been set if ((request instanceof DeleteRequest && ((DeleteRequest)request).refresh()) || (request instanceof UpdateRequest && ((UpdateRequest)request).refresh()) || @@ -535,7 +534,7 @@ public class BulkRequest extends ActionRequest implements Composite super.writeTo(out); out.writeByte(consistencyLevel.id()); out.writeVInt(requests.size()); - for (ActionRequest request : requests) { + for (ActionRequest request : requests) { if (request instanceof IndexRequest) { out.writeByte((byte) 0); } else if (request instanceof DeleteRequest) { diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java index 1e880505b97..74d531df0c4 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateShardRequest.java @@ -52,10 +52,6 @@ public class PercolateShardRequest extends BroadcastShardRequest { this.startTime = request.startTime; } - public PercolateShardRequest(ShardId shardId, OriginalIndices originalIndices) { - super(shardId, originalIndices); - } - PercolateShardRequest(ShardId shardId, PercolateRequest request) { super(shardId, request); this.documentType = 
request.documentType(); diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java index 1d29e6c3971..c2ae538085b 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportShardMultiPercolateAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.SingleShardRequest; @@ -109,7 +108,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi } - public static class Request extends SingleShardRequest implements IndicesRequest { + public static class Request extends SingleShardRequest implements IndicesRequest { private int shardId; private String preference; @@ -160,12 +159,8 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi items = new ArrayList<>(size); for (int i = 0; i < size; i++) { int slot = in.readVInt(); - OriginalIndices originalIndices = OriginalIndices.readOriginalIndices(in); - PercolateShardRequest shardRequest = new PercolateShardRequest(new ShardId(index, shardId), originalIndices); - shardRequest.documentType(in.readString()); - shardRequest.source(in.readBytesReference()); - shardRequest.docSource(in.readBytesReference()); - shardRequest.onlyCount(in.readBoolean()); + PercolateShardRequest shardRequest = new PercolateShardRequest(); + shardRequest.readFrom(in); Item item = new Item(slot, shardRequest); items.add(item); } @@ -179,11 +174,7 @@ 
public class TransportShardMultiPercolateAction extends TransportSingleShardActi out.writeVInt(items.size()); for (Item item : items) { out.writeVInt(item.slot); - OriginalIndices.writeOriginalIndices(item.request.originalIndices(), out); - out.writeString(item.request.documentType()); - out.writeBytesReference(item.request.source()); - out.writeBytesReference(item.request.docSource()); - out.writeBoolean(item.request.onlyCount()); + item.request.writeTo(out); } } @@ -245,7 +236,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi shardResponse.readFrom(in); items.add(new Item(slot, shardResponse)); } else { - items.add(new Item(slot, (Throwable)in.readThrowable())); + items.add(new Item(slot, in.readThrowable())); } } } diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java index b80cf9ddbd4..d753eda4c69 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilter.java @@ -40,13 +40,15 @@ public interface ActionFilter { * Enables filtering the execution of an action on the request side, either by sending a response through the * {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain} */ - void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain); + , Response extends ActionResponse> void apply(Task task, String action, Request request, + ActionListener listener, ActionFilterChain chain); /** * Enables filtering the execution of an action on the response side, either by sending a response through the * {@link ActionListener} or by continuing the execution through the given {@link ActionFilterChain chain} */ - void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain); + void apply(String action, Response 
response, ActionListener listener, + ActionFilterChain chain); /** * A simple base class for injectable action filters that spares the implementation from handling the @@ -60,7 +62,8 @@ public interface ActionFilter { } @Override - public final void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public final , Response extends ActionResponse> void apply(Task task, String action, Request request, + ActionListener listener, ActionFilterChain chain) { if (apply(action, request, listener)) { chain.proceed(task, action, request, listener); } @@ -70,10 +73,11 @@ public interface ActionFilter { * Applies this filter and returns {@code true} if the execution chain should proceed, or {@code false} * if it should be aborted since the filter already handled the request and called the given listener. */ - protected abstract boolean apply(String action, ActionRequest request, ActionListener listener); + protected abstract boolean apply(String action, ActionRequest request, ActionListener listener); @Override - public final void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public final void apply(String action, Response response, ActionListener listener, + ActionFilterChain chain) { if (apply(action, response, listener)) { chain.proceed(action, response, listener); } @@ -83,6 +87,6 @@ public interface ActionFilter { * Applies this filter and returns {@code true} if the execution chain should proceed, or {@code false} * if it should be aborted since the filter already handled the response by calling the given listener. 
*/ - protected abstract boolean apply(String action, ActionResponse response, ActionListener listener); + protected abstract boolean apply(String action, ActionResponse response, ActionListener listener); } } diff --git a/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java b/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java index 9b1ae9b2693..54f55e187a9 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java +++ b/core/src/main/java/org/elasticsearch/action/support/ActionFilterChain.java @@ -27,17 +27,17 @@ import org.elasticsearch.tasks.Task; /** * A filter chain allowing to continue and process the transport action request */ -public interface ActionFilterChain { +public interface ActionFilterChain, Response extends ActionResponse> { /** * Continue processing the request. Should only be called if a response has not been sent through * the given {@link ActionListener listener} */ - void proceed(Task task, final String action, final ActionRequest request, final ActionListener listener); + void proceed(Task task, final String action, final Request request, final ActionListener listener); /** * Continue processing the response. 
Should only be called if a response has not been sent through * the given {@link ActionListener listener} */ - void proceed(final String action, final ActionResponse response, final ActionListener listener); + void proceed(final String action, final Response response, final ActionListener listener); } diff --git a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index 93c96b24be3..c8f04852452 100644 --- a/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/core/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -44,7 +44,7 @@ public final class AutoCreateIndex { @Inject public AutoCreateIndex(Settings settings, IndexNameExpressionResolver resolver) { this.resolver = resolver; - dynamicMappingDisabled = !settings.getAsBoolean(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, MapperService.INDEX_MAPPER_DYNAMIC_DEFAULT); + dynamicMappingDisabled = !MapperService.INDEX_MAPPER_DYNAMIC_SETTING.get(settings); String value = settings.get("action.auto_create_index"); if (value == null || Booleans.isExplicitTrue(value)) { needToCheck = true; diff --git a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index bd9556f0500..a439117fef6 100644 --- a/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -34,8 +34,8 @@ import java.util.function.Supplier; /** * A TransportAction that self registers a handler into the transport service */ -public abstract class HandledTransportAction extends TransportAction{ - +public abstract class HandledTransportAction, Response extends ActionResponse> + extends TransportAction { protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, 
TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { super(settings, actionName, threadPool, actionFilters, indexNameExpressionResolver, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, new TransportHandler()); diff --git a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java index 3e0454550ba..584ff14e756 100644 --- a/core/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -40,7 +40,7 @@ import static org.elasticsearch.action.support.PlainActionFuture.newFuture; /** * */ -public abstract class TransportAction extends AbstractComponent { +public abstract class TransportAction, Response extends ActionResponse> extends AbstractComponent { protected final ThreadPool threadPool; protected final String actionName; @@ -66,7 +66,7 @@ public abstract class TransportAction listener) { + public final Task execute(Request request, ActionListener listener) { Task task = taskManager.register("transport", actionName, request); if (task == null) { execute(null, request, listener); @@ -85,6 +85,7 @@ public abstract class TransportAction listener) { @@ -103,7 +104,7 @@ public abstract class TransportAction(this, logger); + RequestFilterChain requestFilterChain = new RequestFilterChain<>(this, logger); requestFilterChain.proceed(task, actionName, request, listener); } } @@ -114,7 +115,8 @@ public abstract class TransportAction listener); - private static class RequestFilterChain implements ActionFilterChain { + private static class RequestFilterChain, Response extends ActionResponse> + implements ActionFilterChain { private final TransportAction action; private final AtomicInteger index = new AtomicInteger(); @@ -125,14 +127,15 @@ 
public abstract class TransportAction listener) { int i = index.getAndIncrement(); try { if (i < this.action.filters.length) { this.action.filters[i].apply(task, actionName, request, listener, this); } else if (i == this.action.filters.length) { - this.action.doExecute(task, (Request) request, new FilteredActionListener(actionName, listener, new ResponseFilterChain(this.action.filters, logger))); + this.action.doExecute(task, request, new FilteredActionListener(actionName, listener, + new ResponseFilterChain<>(this.action.filters, logger))); } else { listener.onFailure(new IllegalStateException("proceed was called too many times")); } @@ -143,12 +146,13 @@ public abstract class TransportAction listener) { assert false : "request filter chain should never be called on the response side"; } } - private static class ResponseFilterChain implements ActionFilterChain { + private static class ResponseFilterChain, Response extends ActionResponse> + implements ActionFilterChain { private final ActionFilter[] filters; private final AtomicInteger index; @@ -161,12 +165,12 @@ public abstract class TransportAction listener) { assert false : "response filter chain should never be called on the request side"; } - @Override @SuppressWarnings("unchecked") - public void proceed(String action, ActionResponse response, ActionListener listener) { + @Override + public void proceed(String action, Response response, ActionListener listener) { int i = index.decrementAndGet(); try { if (i >= 0) { @@ -186,10 +190,10 @@ public abstract class TransportAction implements ActionListener { private final String actionName; - private final ActionListener listener; - private final ResponseFilterChain chain; + private final ActionListener listener; + private final ResponseFilterChain chain; - private FilteredActionListener(String actionName, ActionListener listener, ResponseFilterChain chain) { + private FilteredActionListener(String actionName, ActionListener listener, ResponseFilterChain chain) { 
this.actionName = actionName; this.listener = listener; this.chain = chain; diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 3159c3ab2b6..be851cfa7e2 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -49,7 +49,7 @@ import java.util.function.Supplier; /** * */ -public abstract class TransportBroadcastAction +public abstract class TransportBroadcastAction, Response extends BroadcastResponse, ShardRequest extends BroadcastShardRequest, ShardResponse extends BroadcastShardResponse> extends HandledTransportAction { protected final ClusterService clusterService; diff --git a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index e8f4a0d83cd..613de1aa923 100644 --- a/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -74,7 +74,7 @@ import java.util.function.Supplier; * @param the response to the client request * @param per-shard operation results */ -public abstract class TransportBroadcastByNodeAction, Response extends BroadcastResponse, ShardOperationResult extends Streamable> extends HandledTransportAction { @@ -447,10 +447,12 @@ public abstract class TransportBroadcastByNodeAction extends HandledTransportAction { +public abstract class TransportMasterNodeAction, Response extends ActionResponse> extends HandledTransportAction { protected final TransportService transportService; protected final ClusterService clusterService; diff --git 
a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java index d0f64cbb9be..681b9f0a648 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeReadAction.java @@ -33,7 +33,8 @@ import java.util.function.Supplier; * A base class for read operations that needs to be performed on the master node. * Can also be executed on the local node if needed. */ -public abstract class TransportMasterNodeReadAction extends TransportMasterNodeAction { +public abstract class TransportMasterNodeReadAction, Response extends ActionResponse> + extends TransportMasterNodeAction { public static final String FORCE_LOCAL_SETTING = "action.master.force_local"; diff --git a/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java b/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java index ec9c3eb46c3..7e42036c1d1 100644 --- a/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/master/info/TransportClusterInfoAction.java @@ -33,7 +33,8 @@ import java.util.function.Supplier; /** */ -public abstract class TransportClusterInfoAction extends TransportMasterNodeReadAction { +public abstract class TransportClusterInfoAction, Response extends ActionResponse> + extends TransportMasterNodeReadAction { public TransportClusterInfoAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java 
b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index b83081b86bd..f90d194287b 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -50,7 +50,7 @@ import java.util.function.Supplier; /** * */ -public abstract class TransportNodesAction extends HandledTransportAction { +public abstract class TransportNodesAction, NodesResponse extends BaseNodesResponse, NodeRequest extends BaseNodeRequest, NodeResponse extends BaseNodeResponse> extends HandledTransportAction { protected final ClusterName clusterName; protected final ClusterService clusterService; diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java new file mode 100644 index 00000000000..3778275d400 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.support.replication; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.index.shard.ShardId; + +/** + * A replication request that has no more information than ReplicationRequest. + * Unfortunately ReplicationRequest can't be declared as a type parameter + * because it has a self referential type parameter of its own. So use this + * instead. + */ +public class BasicReplicationRequest extends ReplicationRequest { + public BasicReplicationRequest() { + + } + + /** + * Creates a new request that inherits headers and context from the request + * provided as argument. + */ + public BasicReplicationRequest(ActionRequest request) { + super(request); + } + + /** + * Creates a new request with resolved shard id + */ + public BasicReplicationRequest(ActionRequest request, ShardId shardId) { + super(request, shardId); + } + + /** + * Copy constructor that creates a new request that is a copy of the one + * provided as an argument. + */ + protected BasicReplicationRequest(BasicReplicationRequest request) { + super(request); + } + +} diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index 9b956918306..a6c9b8f65a3 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -38,7 +38,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * */ -public class ReplicationRequest> extends ActionRequest implements IndicesRequest { +public abstract class ReplicationRequest> extends ActionRequest implements IndicesRequest { public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES); diff --git 
a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index 33a9d349e80..ab88d73d3b0 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support.replication; import com.carrotsearch.hppc.cursors.IntObjectCursor; + import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ReplicationResponse; @@ -52,7 +53,8 @@ import java.util.function.Supplier; * Base class for requests that should be executed on all shards of an index or several indices. * This action sends shard requests to all primary shards of the indices and they are then replicated like write requests */ -public abstract class TransportBroadcastReplicationAction extends HandledTransportAction { +public abstract class TransportBroadcastReplicationAction, Response extends BroadcastResponse, ShardRequest extends ReplicationRequest, ShardResponse extends ReplicationResponse> + extends HandledTransportAction { private final TransportReplicationAction replicatedBroadcastShardAction; private final ClusterService clusterService; diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 0014404057f..b2972201808 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -64,7 +64,6 @@ import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.BaseTransportResponseHandler; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.EmptyTransportResponseHandler; -import org.elasticsearch.transport.ReceiveTimeoutTransportException; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportChannelResponseHandler; import org.elasticsearch.transport.TransportException; @@ -76,6 +75,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; @@ -90,9 +90,7 @@ import java.util.function.Supplier; * primary node to validate request before primary operation followed by sampling state again for resolving * nodes with replica copies to perform replication. */ -public abstract class TransportReplicationAction extends TransportAction { - - public static final String SHARD_FAILURE_TIMEOUT = "action.support.replication.shard.failure_timeout"; +public abstract class TransportReplicationAction, ReplicaRequest extends ReplicationRequest, Response extends ReplicationResponse> extends TransportAction { protected final TransportService transportService; protected final ClusterService clusterService; @@ -101,7 +99,6 @@ public abstract class TransportReplicationAction extends HandledTransportAction { - +public abstract class TransportInstanceSingleOperationAction, Response extends ActionResponse> + extends HandledTransportAction { protected final ClusterService clusterService; protected final TransportService transportService; diff --git a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 47eebc9cfcd..1a77a411a94 100644 --- 
a/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -54,7 +54,7 @@ import static org.elasticsearch.action.support.TransportActions.isShardNotAvaila * the read operation can be performed on other shard copies. Concrete implementations can provide their own list * of candidate shards to try the read operation on. */ -public abstract class TransportSingleShardAction extends TransportAction { +public abstract class TransportSingleShardAction, Response extends ActionResponse> extends TransportAction { protected final ClusterService clusterService; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 43ad73b5dea..dc89ce3cb19 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -241,26 +241,26 @@ final class Security { */ static void addFilePermissions(Permissions policy, Environment environment) { // read-only dirs - addPath(policy, "path.home", environment.binFile(), "read,readlink"); - addPath(policy, "path.home", environment.libFile(), "read,readlink"); - addPath(policy, "path.home", environment.modulesFile(), "read,readlink"); - addPath(policy, "path.plugins", environment.pluginsFile(), "read,readlink"); - addPath(policy, "path.conf", environment.configFile(), "read,readlink"); - addPath(policy, "path.scripts", environment.scriptsFile(), "read,readlink"); + addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.binFile(), "read,readlink"); + addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.libFile(), "read,readlink"); + addPath(policy, Environment.PATH_HOME_SETTING.getKey(), environment.modulesFile(), "read,readlink"); + addPath(policy, Environment.PATH_PLUGINS_SETTING.getKey(), environment.pluginsFile(), 
"read,readlink"); + addPath(policy, Environment.PATH_CONF_SETTING.getKey(), environment.configFile(), "read,readlink"); + addPath(policy, Environment.PATH_SCRIPTS_SETTING.getKey(), environment.scriptsFile(), "read,readlink"); // read-write dirs addPath(policy, "java.io.tmpdir", environment.tmpFile(), "read,readlink,write,delete"); - addPath(policy, "path.logs", environment.logsFile(), "read,readlink,write,delete"); + addPath(policy, Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile(), "read,readlink,write,delete"); if (environment.sharedDataFile() != null) { - addPath(policy, "path.shared_data", environment.sharedDataFile(), "read,readlink,write,delete"); + addPath(policy, Environment.PATH_SHARED_DATA_SETTING.getKey(), environment.sharedDataFile(), "read,readlink,write,delete"); } for (Path path : environment.dataFiles()) { - addPath(policy, "path.data", path, "read,readlink,write,delete"); + addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete"); } for (Path path : environment.dataWithClusterFiles()) { - addPath(policy, "path.data", path, "read,readlink,write,delete"); + addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete"); } for (Path path : environment.repoFiles()) { - addPath(policy, "path.repo", path, "read,readlink,write,delete"); + addPath(policy, Environment.PATH_REPO_SETTING.getKey(), path, "read,readlink,write,delete"); } if (environment.pidFile() != null) { // we just need permission to remove the file if its elsewhere. diff --git a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java index 4cf5a5a961d..d9ddc56d48a 100644 --- a/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java +++ b/core/src/main/java/org/elasticsearch/client/ElasticsearchClient.java @@ -40,7 +40,8 @@ public interface ElasticsearchClient { * @param The request builder type. 
* @return A future allowing to get back the response. */ - > ActionFuture execute(final Action action, final Request request); + , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + final Action action, final Request request); /** * Executes a generic action, denoted by an {@link Action}. @@ -52,7 +53,8 @@ public interface ElasticsearchClient { * @param The response type. * @param The request builder type. */ - > void execute(final Action action, final Request request, ActionListener listener); + , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + final Action action, final Request request, ActionListener listener); /** * Prepares a request builder to execute, specified by {@link Action}. @@ -63,7 +65,8 @@ public interface ElasticsearchClient { * @param The request builder. * @return The request builder, that can, at a later stage, execute the request. */ - > RequestBuilder prepareExecute(final Action action); + , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + final Action action); /** * Returns the threadpool used to execute requests on this client diff --git a/core/src/main/java/org/elasticsearch/client/FilterClient.java b/core/src/main/java/org/elasticsearch/client/FilterClient.java index 06d81f0c9d5..77abceef17a 100644 --- a/core/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/core/src/main/java/org/elasticsearch/client/FilterClient.java @@ -52,7 +52,8 @@ public abstract class FilterClient extends AbstractClient { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, ActionListener listener) { in().execute(action, request, listener); } diff --git 
a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java index 65adfad64dc..4f64f63f8d7 100644 --- a/core/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/core/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -56,7 +56,8 @@ public class NodeClient extends AbstractClient { @SuppressWarnings("unchecked") @Override - public > void doExecute(Action action, Request request, ActionListener listener) { + public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, ActionListener listener) { TransportAction transportAction = actions.get(action); if (transportAction == null) { throw new IllegalStateException("failed to find action [" + action + "] to execute"); diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index e5a465442bb..e5e1bea6cb5 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -363,12 +363,14 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public final > RequestBuilder prepareExecute(final Action action) { + public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + final Action action) { return action.newRequestBuilder(this); } @Override - public final > ActionFuture execute(Action action, Request request) { + public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + Action action, Request request) { PlainActionFuture actionFuture = PlainActionFuture.newFuture(); execute(action, request, actionFuture); return actionFuture; @@ -378,13 +380,14 @@ public abstract class AbstractClient 
extends AbstractComponent implements Client * This is the single execution point of *all* clients. */ @Override - public final > void execute(Action action, Request request, ActionListener listener) { + public final , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + Action action, Request request, ActionListener listener) { headers.applyTo(request); listener = threadedWrapper.wrap(listener); doExecute(action, request, listener); } - protected abstract > void doExecute(final Action action, final Request request, ActionListener listener); + protected abstract , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(final Action action, final Request request, ActionListener listener); @Override public ActionFuture index(final IndexRequest request) { @@ -821,17 +824,20 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public > ActionFuture execute(Action action, Request request) { + public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + Action action, Request request) { return client.execute(action, request); } @Override - public > void execute(Action action, Request request, ActionListener listener) { + public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @Override - public > RequestBuilder prepareExecute(Action action) { + public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + Action action) { return client.prepareExecute(action); } @@ -1178,17 +1184,20 @@ public abstract class AbstractClient extends AbstractComponent implements Client } @Override - public > ActionFuture execute(Action action, Request request) { + public , Response extends ActionResponse, 
RequestBuilder extends ActionRequestBuilder> ActionFuture execute( + Action action, Request request) { return client.execute(action, request); } @Override - public > void execute(Action action, Request request, ActionListener listener) { + public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void execute( + Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @Override - public > RequestBuilder prepareExecute(Action action) { + public , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> RequestBuilder prepareExecute( + Action action) { return client.prepareExecute(action); } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 3b8be668f43..3d68e642c46 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -19,6 +19,10 @@ package org.elasticsearch.client.transport; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; @@ -36,6 +40,7 @@ import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.ModulesBuilder; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; @@ -54,10 +59,6 @@ import org.elasticsearch.threadpool.ThreadPoolModule; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.netty.NettyTransport; -import 
java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - import static org.elasticsearch.common.settings.Settings.settingsBuilder; /** @@ -81,7 +82,7 @@ public class TransportClient extends AbstractClient { */ public static class Builder { - private Settings settings = Settings.EMPTY; + private Settings providedSettings = Settings.EMPTY; private List> pluginClasses = new ArrayList<>(); /** @@ -95,7 +96,7 @@ public class TransportClient extends AbstractClient { * The settings to configure the transport client with. */ public Builder settings(Settings settings) { - this.settings = settings; + this.providedSettings = settings; return this; } @@ -107,27 +108,29 @@ public class TransportClient extends AbstractClient { return this; } + private PluginsService newPluginService(final Settings settings) { + final Settings.Builder settingsBuilder = settingsBuilder() + .put(NettyTransport.PING_SCHEDULE, "5s") // enable by default the transport schedule ping interval + .put( InternalSettingsPreparer.prepareSettings(settings)) + .put("network.server", false) + .put("node.client", true) + .put(CLIENT_TYPE_SETTING, CLIENT_TYPE); + return new PluginsService(settingsBuilder.build(), null, null, pluginClasses); + }; + /** * Builds a new instance of the transport client. 
*/ public TransportClient build() { - Settings settings = InternalSettingsPreparer.prepareSettings(this.settings); - settings = settingsBuilder() - .put(NettyTransport.PING_SCHEDULE, "5s") // enable by default the transport schedule ping interval - .put(settings) - .put("network.server", false) - .put("node.client", true) - .put(CLIENT_TYPE_SETTING, CLIENT_TYPE) - .build(); - - PluginsService pluginsService = new PluginsService(settings, null, null, pluginClasses); - this.settings = pluginsService.updatedSettings(); + final PluginsService pluginsService = newPluginService(providedSettings); + final Settings settings = pluginsService.updatedSettings(); Version version = Version.CURRENT; final ThreadPool threadPool = new ThreadPool(settings); final NetworkService networkService = new NetworkService(settings); final SettingsFilter settingsFilter = new SettingsFilter(settings); + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); boolean success = false; try { ModulesBuilder modules = new ModulesBuilder(); @@ -137,18 +140,18 @@ public class TransportClient extends AbstractClient { modules.add(pluginModule); } modules.add(new PluginsModule(pluginsService)); - modules.add(new SettingsModule(this.settings, settingsFilter )); - modules.add(new NetworkModule(networkService, this.settings, true)); - modules.add(new ClusterNameModule(this.settings)); + modules.add(new SettingsModule(settings, settingsFilter )); + modules.add(new NetworkModule(networkService, settings, true, namedWriteableRegistry)); + modules.add(new ClusterNameModule(settings)); modules.add(new ThreadPoolModule(threadPool)); - modules.add(new SearchModule() { + modules.add(new SearchModule(settings, namedWriteableRegistry) { @Override protected void configure() { // noop } }); modules.add(new ActionModule(true)); - modules.add(new CircuitBreakerModule(this.settings)); + modules.add(new CircuitBreakerModule(settings)); pluginsService.processModules(modules); @@ -276,7 +279,7 @@ 
public class TransportClient extends AbstractClient { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(Action action, Request request, ActionListener listener) { proxy.execute(action, request, listener); } } diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 56befbb9b84..99c70255ca8 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -101,6 +102,11 @@ public class TransportClientNodesService extends AbstractComponent { private volatile boolean closed; + + public static final Setting CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL = Setting.positiveTimeSetting("client.transport.nodes_sampler_interval", timeValueSeconds(5), false, Setting.Scope.CLUSTER); + public static final Setting CLIENT_TRANSPORT_PING_TIMEOUT = Setting.positiveTimeSetting("client.transport.ping_timeout", timeValueSeconds(5), false, Setting.Scope.CLUSTER); + public static final Setting CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME = Setting.boolSetting("client.transport.ignore_cluster_name", false, false, Setting.Scope.CLUSTER); + @Inject public TransportClientNodesService(Settings settings, ClusterName clusterName, TransportService transportService, 
ThreadPool threadPool, Headers headers, Version version) { @@ -111,9 +117,9 @@ public class TransportClientNodesService extends AbstractComponent { this.minCompatibilityVersion = version.minimumCompatibilityVersion(); this.headers = headers; - this.nodesSamplerInterval = this.settings.getAsTime("client.transport.nodes_sampler_interval", timeValueSeconds(5)); - this.pingTimeout = this.settings.getAsTime("client.transport.ping_timeout", timeValueSeconds(5)).millis(); - this.ignoreClusterName = this.settings.getAsBoolean("client.transport.ignore_cluster_name", false); + this.nodesSamplerInterval = CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL.get(this.settings); + this.pingTimeout = CLIENT_TRANSPORT_PING_TIMEOUT.get(this.settings).millis(); + this.ignoreClusterName = CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME.get(this.settings); if (logger.isDebugEnabled()) { logger.debug("node_sampler_interval[" + nodesSamplerInterval + "]"); diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 670d7c09211..626b020c56c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateFilter; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; @@ -36,7 +35,6 @@ import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService; import org.elasticsearch.cluster.node.DiscoveryNodeService; import 
org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.RoutingService; -import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; @@ -56,26 +54,12 @@ import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocatio import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; -import org.elasticsearch.cluster.settings.DynamicSettings; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.gateway.GatewayAllocator; -import org.elasticsearch.gateway.PrimaryShardAllocator; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexingSlowLog; -import org.elasticsearch.index.search.stats.SearchSlowLog; -import org.elasticsearch.index.settings.IndexDynamicSettings; -import org.elasticsearch.index.MergePolicyConfig; -import org.elasticsearch.index.MergeSchedulerConfig; -import org.elasticsearch.index.store.IndexStore; -import org.elasticsearch.indices.IndicesWarmer; -import org.elasticsearch.indices.cache.request.IndicesRequestCache; -import org.elasticsearch.indices.ttl.IndicesTTLService; -import org.elasticsearch.search.internal.DefaultSearchContext; import java.util.Arrays; import java.util.Collections; @@ -106,7 +90,6 @@ public class ClusterModule extends AbstractModule { 
SnapshotInProgressAllocationDecider.class)); private final Settings settings; - private final DynamicSettings.Builder indexDynamicSettings = new DynamicSettings.Builder(); private final ExtensionPoint.SelectedType shardsAllocators = new ExtensionPoint.SelectedType<>("shards_allocator", ShardsAllocator.class); private final ExtensionPoint.ClassSet allocationDeciders = new ExtensionPoint.ClassSet<>("allocation_decider", AllocationDecider.class, AllocationDeciders.class); private final ExtensionPoint.ClassSet indexTemplateFilters = new ExtensionPoint.ClassSet<>("index_template_filter", IndexTemplateFilter.class); @@ -116,9 +99,6 @@ public class ClusterModule extends AbstractModule { public ClusterModule(Settings settings) { this.settings = settings; - - registerBuiltinIndexSettings(); - for (Class decider : ClusterModule.DEFAULT_ALLOCATION_DECIDERS) { registerAllocationDecider(decider); } @@ -126,68 +106,6 @@ public class ClusterModule extends AbstractModule { registerShardsAllocator(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR, BalancedShardsAllocator.class); } - private void registerBuiltinIndexSettings() { - registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE); - registerIndexDynamicSetting(IndexStore.INDEX_STORE_THROTTLE_TYPE, Validator.EMPTY); - registerIndexDynamicSetting(MergeSchedulerConfig.MAX_THREAD_COUNT, Validator.NON_NEGATIVE_INTEGER); - registerIndexDynamicSetting(MergeSchedulerConfig.MAX_MERGE_COUNT, Validator.EMPTY); - registerIndexDynamicSetting(MergeSchedulerConfig.AUTO_THROTTLE, Validator.EMPTY); - registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_REQUIRE_GROUP + "*", Validator.EMPTY); - registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "*", Validator.EMPTY); - registerIndexDynamicSetting(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "*", Validator.EMPTY); - registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, 
Validator.EMPTY); - registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, Validator.NON_NEGATIVE_INTEGER); - registerIndexDynamicSetting(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_READ_ONLY, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_BLOCKS_READ, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_BLOCKS_WRITE, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_BLOCKS_METADATA, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, Validator.EMPTY); - registerIndexDynamicSetting(IndexMetaData.SETTING_PRIORITY, Validator.NON_NEGATIVE_INTEGER); - registerIndexDynamicSetting(IndicesTTLService.INDEX_TTL_DISABLE_PURGE, Validator.EMPTY); - registerIndexDynamicSetting(IndexSettings.INDEX_REFRESH_INTERVAL, Validator.TIME); - registerIndexDynamicSetting(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS, Validator.EMPTY); - registerIndexDynamicSetting(IndexSettings.INDEX_GC_DELETES_SETTING, Validator.TIME); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, Validator.TIME); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, Validator.TIME); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, Validator.TIME); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, Validator.TIME); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT, Validator.EMPTY); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL, Validator.EMPTY); - registerIndexDynamicSetting(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG, Validator.EMPTY); - 
registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, Validator.TIME); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT, Validator.EMPTY); - registerIndexDynamicSetting(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL, Validator.EMPTY); - registerIndexDynamicSetting(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, Validator.INTEGER); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, Validator.DOUBLE); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT, Validator.BYTES_SIZE); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, Validator.INTEGER_GTE_2); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, Validator.INTEGER_GTE_2); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, Validator.BYTES_SIZE); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, Validator.DOUBLE_GTE_2); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, Validator.NON_NEGATIVE_DOUBLE); - registerIndexDynamicSetting(MergePolicyConfig.INDEX_COMPOUND_FORMAT, 
Validator.EMPTY); - registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, Validator.BYTES_SIZE); - registerIndexDynamicSetting(IndexSettings.INDEX_TRANSLOG_DURABILITY, Validator.EMPTY); - registerIndexDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED, Validator.EMPTY); - registerIndexDynamicSetting(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, Validator.BOOLEAN); - registerIndexDynamicSetting(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, Validator.TIME); - registerIndexDynamicSetting(DefaultSearchContext.MAX_RESULT_WINDOW, Validator.POSITIVE_INTEGER); - } - - public void registerIndexDynamicSetting(String setting, Validator validator) { - indexDynamicSettings.addSetting(setting, validator); - } - - public void registerAllocationDecider(Class allocationDecider) { allocationDeciders.registerExtension(allocationDecider); } @@ -202,8 +120,6 @@ public class ClusterModule extends AbstractModule { @Override protected void configure() { - bind(DynamicSettings.class).annotatedWith(IndexDynamicSettings.class).toInstance(indexDynamicSettings.build()); - // bind ShardsAllocator String shardsAllocatorType = shardsAllocators.bindType(binder(), settings, ClusterModule.SHARDS_ALLOCATOR_TYPE_KEY, ClusterModule.BALANCED_ALLOCATOR); if (shardsAllocatorType.equals(ClusterModule.EVEN_SHARD_COUNT_ALLOCATOR)) { diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 925a5a12ed6..5107b4495ab 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -406,10 +406,26 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu String nodeId = nodeStats.getNode().id(); String nodeName = nodeStats.getNode().getName(); if (logger.isTraceEnabled()) { - logger.trace("node: [{}], most available: total 
disk: {}, available disk: {} / least available: total disk: {}, available disk: {}", nodeId, mostAvailablePath.getTotal(), leastAvailablePath.getAvailable(), leastAvailablePath.getTotal(), leastAvailablePath.getAvailable()); + logger.trace("node: [{}], most available: total disk: {}, available disk: {} / least available: total disk: {}, available disk: {}", + nodeId, mostAvailablePath.getTotal(), leastAvailablePath.getAvailable(), + leastAvailablePath.getTotal(), leastAvailablePath.getAvailable()); + } + if (leastAvailablePath.getTotal().bytes() < 0) { + if (logger.isTraceEnabled()) { + logger.trace("node: [{}] least available path has less than 0 total bytes of disk [{}], skipping", + nodeId, leastAvailablePath.getTotal().bytes()); + } + } else { + newLeastAvaiableUsages.put(nodeId, new DiskUsage(nodeId, nodeName, leastAvailablePath.getPath(), leastAvailablePath.getTotal().bytes(), leastAvailablePath.getAvailable().bytes())); + } + if (mostAvailablePath.getTotal().bytes() < 0) { + if (logger.isTraceEnabled()) { + logger.trace("node: [{}] most available path has less than 0 total bytes of disk [{}], skipping", + nodeId, mostAvailablePath.getTotal().bytes()); + } + } else { + newMostAvaiableUsages.put(nodeId, new DiskUsage(nodeId, nodeName, mostAvailablePath.getPath(), mostAvailablePath.getTotal().bytes(), mostAvailablePath.getAvailable().bytes())); } - newLeastAvaiableUsages.put(nodeId, new DiskUsage(nodeId, nodeName, leastAvailablePath.getPath(), leastAvailablePath.getTotal().bytes(), leastAvailablePath.getAvailable().bytes())); - newMostAvaiableUsages.put(nodeId, new DiskUsage(nodeId, nodeName, mostAvailablePath.getPath(), mostAvailablePath.getTotal().bytes(), mostAvailablePath.getAvailable().bytes())); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 00a238504f2..3a837d81a7e 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -22,9 +22,11 @@ package org.elasticsearch.cluster.action.shard; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.MasterNodeChangePredicate; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -42,13 +44,16 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.discovery.Discovery; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.EmptyTransportResponseHandler; +import org.elasticsearch.transport.NodeDisconnectedException; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; -import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; @@ -60,55 +65,95 @@ import java.util.Locale; import static org.elasticsearch.cluster.routing.ShardRouting.readShardRoutingEntry; public class ShardStateAction extends AbstractComponent { + public static final String 
SHARD_STARTED_ACTION_NAME = "internal:cluster/shard/started"; public static final String SHARD_FAILED_ACTION_NAME = "internal:cluster/shard/failure"; private final TransportService transportService; + private final ClusterService clusterService; @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { super(settings); this.transportService = transportService; + this.clusterService = clusterService; transportService.registerRequestHandler(SHARD_STARTED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardStartedTransportHandler(clusterService, new ShardStartedClusterStateTaskExecutor(allocationService, logger), logger)); transportService.registerRequestHandler(SHARD_FAILED_ACTION_NAME, ShardRoutingEntry::new, ThreadPool.Names.SAME, new ShardFailedTransportHandler(clusterService, new ShardFailedClusterStateTaskExecutor(allocationService, routingService, logger), logger)); } - public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { - shardFailed(clusterState, shardRouting, indexUUID, message, failure, null, listener); - } - - public void resendShardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { - logger.trace("{} re-sending failed shard [{}], index UUID [{}], reason [{}]", shardRouting.shardId(), failure, shardRouting, indexUUID, message); - shardFailed(clusterState, shardRouting, indexUUID, message, failure, listener); - } - - public void shardFailed(final ClusterState clusterState, final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, TimeValue timeout, Listener listener) { - DiscoveryNode masterNode = 
clusterState.nodes().masterNode(); + private void sendShardAction(final String actionName, final ClusterStateObserver observer, final ShardRoutingEntry shardRoutingEntry, final Listener listener) { + DiscoveryNode masterNode = observer.observedState().nodes().masterNode(); if (masterNode == null) { - logger.warn("{} no master known to fail shard [{}]", shardRouting.shardId(), shardRouting); - listener.onShardFailedNoMaster(); - return; - } - ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); - TransportRequestOptions options = TransportRequestOptions.EMPTY; - if (timeout != null) { - options = TransportRequestOptions.builder().withTimeout(timeout).build(); - } - transportService.sendRequest(masterNode, - SHARD_FAILED_ACTION_NAME, shardRoutingEntry, options, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleResponse(TransportResponse.Empty response) { - listener.onSuccess(); - } + logger.warn("{} no master known for action [{}] for shard [{}]", shardRoutingEntry.getShardRouting().shardId(), actionName, shardRoutingEntry.getShardRouting()); + waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); + } else { + logger.debug("{} sending [{}] to [{}] for shard [{}]", shardRoutingEntry.getShardRouting().getId(), actionName, masterNode.getId(), shardRoutingEntry); + transportService.sendRequest(masterNode, + actionName, shardRoutingEntry, new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { + @Override + public void handleResponse(TransportResponse.Empty response) { + listener.onSuccess(); + } - @Override - public void handleException(TransportException exp) { - logger.warn("{} unexpected failure while sending request to [{}] to fail shard [{}]", exp, shardRoutingEntry.shardRouting.shardId(), masterNode, shardRoutingEntry); - listener.onShardFailedFailure(masterNode, exp); + @Override + public void handleException(TransportException exp) { + if 
(isMasterChannelException(exp)) { + waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); + } else { + logger.warn("{} unexpected failure while sending request [{}] to [{}] for shard [{}]", exp, shardRoutingEntry.getShardRouting().shardId(), actionName, masterNode, shardRoutingEntry); + listener.onFailure(exp.getCause()); + } + } + }); + } + } + + private static Class[] MASTER_CHANNEL_EXCEPTIONS = new Class[]{ + NotMasterException.class, + ConnectTransportException.class, + Discovery.FailedToCommitClusterStateException.class + }; + + private static boolean isMasterChannelException(TransportException exp) { + return ExceptionsHelper.unwrap(exp, MASTER_CHANNEL_EXCEPTIONS) != null; + } + + public void shardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { + ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, message, failure); + sendShardAction(SHARD_FAILED_ACTION_NAME, observer, shardRoutingEntry, listener); + } + + public void resendShardFailed(final ShardRouting shardRouting, final String indexUUID, final String message, @Nullable final Throwable failure, Listener listener) { + logger.trace("{} re-sending failed shard [{}], index UUID [{}], reason [{}]", shardRouting.shardId(), failure, shardRouting, indexUUID, message); + shardFailed(shardRouting, indexUUID, message, failure, listener); + } + + // visible for testing + protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardRoutingEntry shardRoutingEntry, Listener listener) { + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + if (logger.isTraceEnabled()) { + logger.trace("new cluster state [{}] after waiting for master election to fail shard [{}]", 
shardRoutingEntry.getShardRouting().shardId(), state.prettyPrint(), shardRoutingEntry); } - }); + sendShardAction(actionName, observer, shardRoutingEntry, listener); + } + + @Override + public void onClusterServiceClose() { + logger.warn("{} node closed while execution action [{}] for shard [{}]", shardRoutingEntry.failure, shardRoutingEntry.getShardRouting().getId(), actionName, shardRoutingEntry.getShardRouting()); + listener.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + // we wait indefinitely for a new master + assert false; + } + }, MasterNodeChangePredicate.INSTANCE); } private static class ShardFailedTransportHandler implements TransportRequestHandler { @@ -208,21 +253,10 @@ public class ShardStateAction extends AbstractComponent { } } - public void shardStarted(final ClusterState clusterState, final ShardRouting shardRouting, String indexUUID, final String reason) { - DiscoveryNode masterNode = clusterState.nodes().masterNode(); - if (masterNode == null) { - logger.warn("{} no master known to start shard [{}]", shardRouting.shardId(), shardRouting); - return; - } - ShardRoutingEntry shardRoutingEntry = new ShardRoutingEntry(shardRouting, indexUUID, reason, null); - logger.debug("sending start shard [{}]", shardRoutingEntry); - transportService.sendRequest(masterNode, - SHARD_STARTED_ACTION_NAME, new ShardRoutingEntry(shardRouting, indexUUID, reason, null), new EmptyTransportResponseHandler(ThreadPool.Names.SAME) { - @Override - public void handleException(TransportException exp) { - logger.warn("{} failure sending start shard [{}] to [{}]", exp, shardRouting.shardId(), masterNode, shardRouting); - } - }); + public void shardStarted(final ShardRouting shardRouting, String indexUUID, final String message, Listener listener) { + ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger); + ShardRoutingEntry shardRoutingEntry = new 
ShardRoutingEntry(shardRouting, indexUUID, message, null); + sendShardAction(SHARD_STARTED_ACTION_NAME, observer, shardRoutingEntry, listener); } private static class ShardStartedTransportHandler implements TransportRequestHandler { @@ -334,10 +368,23 @@ public class ShardStateAction extends AbstractComponent { default void onSuccess() { } - default void onShardFailedNoMaster() { - } - - default void onShardFailedFailure(final DiscoveryNode master, final TransportException e) { + /** + * Notification for non-channel exceptions that are not handled + * by {@link ShardStateAction}. + * + * The exceptions that are handled by {@link ShardStateAction} + * are: + * - {@link NotMasterException} + * - {@link NodeDisconnectedException} + * - {@link Discovery.FailedToCommitClusterStateException} + * + * Any other exception is communicated to the requester via + * this notification. + * + * @param t the unexpected cause of the failure on the master + */ + default void onFailure(final Throwable t) { } } + } diff --git a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index 1bab607ee85..0006c7da8c9 100644 --- a/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/core/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -306,16 +306,16 @@ public class ClusterBlocks extends AbstractDiffable { if (indexMetaData.getState() == IndexMetaData.State.CLOSE) { addIndexBlock(indexMetaData.getIndex(), MetaDataIndexStateService.INDEX_CLOSED_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_READ_ONLY, false)) { + if (IndexMetaData.INDEX_READ_ONLY_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_ONLY_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_READ, false)) { + if 
(IndexMetaData.INDEX_BLOCKS_READ_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_READ_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_WRITE, false)) { + if (IndexMetaData.INDEX_BLOCKS_WRITE_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_WRITE_BLOCK); } - if (indexMetaData.getSettings().getAsBoolean(IndexMetaData.SETTING_BLOCKS_METADATA, false)) { + if (IndexMetaData.INDEX_BLOCKS_METADATA_SETTING.get(indexMetaData.getSettings())) { addIndexBlock(indexMetaData.getIndex(), IndexMetaData.INDEX_METADATA_BLOCK); } return this; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java new file mode 100644 index 00000000000..d9b288bb897 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java @@ -0,0 +1,92 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.settings.Setting; + +/** + * This class acts as a functional wrapper around the index.auto_expand_replicas setting. + * This setting or rather it's value is expanded into a min and max value which requires special handling + * based on the number of datanodes in the cluster. This class handles all the parsing and streamlines the access to these values. + */ +final class AutoExpandReplicas { + // the value we recognize in the "max" position to mean all the nodes + private static final String ALL_NODES_VALUE = "all"; + public static final Setting SETTING = new Setting<>(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "false", (value) -> { + final int min; + final int max; + if (Booleans.parseBoolean(value, true) == false) { + return new AutoExpandReplicas(0, 0, false); + } + final int dash = value.indexOf('-'); + if (-1 == dash) { + throw new IllegalArgumentException("failed to parse [" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] from value: [" + value + "] at index " + dash); + } + final String sMin = value.substring(0, dash); + try { + min = Integer.parseInt(sMin); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("failed to parse [" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] from value: [" + value + "] at index " + dash, e); + } + String sMax = value.substring(dash + 1); + if (sMax.equals(ALL_NODES_VALUE)) { + max = Integer.MAX_VALUE; + } else { + try { + max = Integer.parseInt(sMax); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("failed to parse [" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] from value: [" + value + "] at index " + dash, e); + } + } + return new AutoExpandReplicas(min, max, true); + }, true, Setting.Scope.INDEX); + + private final int minReplicas; + private final int maxReplicas; + private final boolean enabled; + + private AutoExpandReplicas(int 
minReplicas, int maxReplicas, boolean enabled) { + if (minReplicas > maxReplicas) { + throw new IllegalArgumentException("[" + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS + "] minReplicas must be =< maxReplicas but wasn't " + minReplicas + " > " + maxReplicas); + } + this.minReplicas = minReplicas; + this.maxReplicas = maxReplicas; + this.enabled = enabled; + } + + int getMinReplicas() { + return minReplicas; + } + + int getMaxReplicas(int numDataNodes) { + return Math.min(maxReplicas, numDataNodes-1); + } + + @Override + public String toString() { + return enabled ? minReplicas + "-" + maxReplicas : "false"; + } + + boolean isEnabled() { + return enabled; + } +} + + diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index c1f10079768..b2772c0b2c1 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -29,14 +29,17 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.node.DiscoveryNodeFilters; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import 
org.elasticsearch.common.xcontent.FromXContentBuilder; @@ -58,6 +61,7 @@ import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.AND; import static org.elasticsearch.cluster.node.DiscoveryNodeFilters.OpType.OR; @@ -70,10 +74,6 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; */ public class IndexMetaData implements Diffable, FromXContentBuilder, ToXContent { - public static final IndexMetaData PROTO = IndexMetaData.builder("") - .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); - public interface Custom extends Diffable, ToXContent { String type(); @@ -152,14 +152,29 @@ public class IndexMetaData implements Diffable, FromXContentBuild } public static final String INDEX_SETTING_PREFIX = "index."; public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards"; + public static final Setting INDEX_NUMBER_OF_SHARDS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_SHARDS, 5, 1, false, Setting.Scope.INDEX); public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas"; + public static final Setting INDEX_NUMBER_OF_REPLICAS_SETTING = Setting.intSetting(SETTING_NUMBER_OF_REPLICAS, 1, 0, true, Setting.Scope.INDEX); public static final String SETTING_SHADOW_REPLICAS = "index.shadow_replicas"; + public static final Setting INDEX_SHADOW_REPLICAS_SETTING = Setting.boolSetting(SETTING_SHADOW_REPLICAS, false, false, Setting.Scope.INDEX); + public static final String SETTING_SHARED_FILESYSTEM = "index.shared_filesystem"; + public static final Setting INDEX_SHARED_FILESYSTEM_SETTING = Setting.boolSetting(SETTING_SHARED_FILESYSTEM, false, false, Setting.Scope.INDEX); + public static final String SETTING_AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; + public static final 
Setting INDEX_AUTO_EXPAND_REPLICAS_SETTING = AutoExpandReplicas.SETTING; public static final String SETTING_READ_ONLY = "index.blocks.read_only"; + public static final Setting INDEX_READ_ONLY_SETTING = Setting.boolSetting(SETTING_READ_ONLY, false, true, Setting.Scope.INDEX); + public static final String SETTING_BLOCKS_READ = "index.blocks.read"; + public static final Setting INDEX_BLOCKS_READ_SETTING = Setting.boolSetting(SETTING_BLOCKS_READ, false, true, Setting.Scope.INDEX); + public static final String SETTING_BLOCKS_WRITE = "index.blocks.write"; + public static final Setting INDEX_BLOCKS_WRITE_SETTING = Setting.boolSetting(SETTING_BLOCKS_WRITE, false, true, Setting.Scope.INDEX); + public static final String SETTING_BLOCKS_METADATA = "index.blocks.metadata"; + public static final Setting INDEX_BLOCKS_METADATA_SETTING = Setting.boolSetting(SETTING_BLOCKS_METADATA, false, true, Setting.Scope.INDEX); + public static final String SETTING_VERSION_CREATED = "index.version.created"; public static final String SETTING_VERSION_CREATED_STRING = "index.version.created_string"; public static final String SETTING_VERSION_UPGRADED = "index.version.upgraded"; @@ -167,12 +182,23 @@ public class IndexMetaData implements Diffable, FromXContentBuild public static final String SETTING_VERSION_MINIMUM_COMPATIBLE = "index.version.minimum_compatible"; public static final String SETTING_CREATION_DATE = "index.creation_date"; public static final String SETTING_PRIORITY = "index.priority"; + public static final Setting INDEX_PRIORITY_SETTING = Setting.intSetting("index.priority", 1, 0, true, Setting.Scope.INDEX); public static final String SETTING_CREATION_DATE_STRING = "index.creation_date_string"; public static final String SETTING_INDEX_UUID = "index.uuid"; public static final String SETTING_DATA_PATH = "index.data_path"; + public static final Setting INDEX_DATA_PATH_SETTING = new Setting<>(SETTING_DATA_PATH, "", Function.identity(), false, Setting.Scope.INDEX); public static final 
String SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE = "index.shared_filesystem.recover_on_any_node"; + public static final Setting INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING = Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, true, Setting.Scope.INDEX); public static final String INDEX_UUID_NA_VALUE = "_na_"; + public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.require.", true, Setting.Scope.INDEX); + public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.include.", true, Setting.Scope.INDEX); + public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.exclude.", true, Setting.Scope.INDEX); + + public static final IndexMetaData PROTO = IndexMetaData.builder("") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1).numberOfReplicas(0).build(); + public static final String KEY_ACTIVE_ALLOCATIONS = "active_allocations"; private final int numberOfShards; @@ -627,10 +653,6 @@ public class IndexMetaData implements Diffable, FromXContentBuild return this; } - public long creationDate() { - return settings.getAsLong(SETTING_CREATION_DATE, -1l); - } - public Builder settings(Settings.Builder settings) { this.settings = settings.build(); return this; @@ -645,11 +667,6 @@ public class IndexMetaData implements Diffable, FromXContentBuild return mappings.get(type); } - public Builder removeMapping(String mappingType) { - mappings.remove(mappingType); - return this; - } - public Builder putMapping(String type, String source) throws IOException { try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) { putMapping(new MappingMetaData(type, parser.mapOrdered())); @@ -692,24 +709,11 @@ public class IndexMetaData implements Diffable, FromXContentBuild return this; } - public Builder 
removeCustom(String type) { - this.customs.remove(type); - return this; - } - - public Custom getCustom(String type) { - return this.customs.get(type); - } - public Builder putActiveAllocationIds(int shardId, Set allocationIds) { activeAllocationIds.put(shardId, new HashSet(allocationIds)); return this; } - public Set getActiveAllocationIds(int shardId) { - return activeAllocationIds.get(shardId); - } - public long version() { return this.version; } @@ -758,22 +762,21 @@ public class IndexMetaData implements Diffable, FromXContentBuild filledActiveAllocationIds.put(i, Collections.emptySet()); } } - - Map requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap(); + final Map requireMap = INDEX_ROUTING_REQUIRE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters requireFilters; if (requireMap.isEmpty()) { requireFilters = null; } else { requireFilters = DiscoveryNodeFilters.buildFromKeyValue(AND, requireMap); } - Map includeMap = settings.getByPrefix("index.routing.allocation.include.").getAsMap(); + Map includeMap = INDEX_ROUTING_INCLUDE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters includeFilters; if (includeMap.isEmpty()) { includeFilters = null; } else { includeFilters = DiscoveryNodeFilters.buildFromKeyValue(OR, includeMap); } - Map excludeMap = settings.getByPrefix("index.routing.allocation.exclude.").getAsMap(); + Map excludeMap = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.get(settings).getAsMap(); final DiscoveryNodeFilters excludeFilters; if (excludeMap.isEmpty()) { excludeFilters = null; diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index b2c9e500f66..2344e9af77c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -47,6 +47,7 @@ 
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -103,13 +104,14 @@ public class MetaDataCreateIndexService extends AbstractComponent { private final IndexTemplateFilter indexTemplateFilter; private final Environment env; private final NodeServicesProvider nodeServicesProvider; + private final IndexScopedSettings indexScopedSettings; @Inject public MetaDataCreateIndexService(Settings settings, ClusterService clusterService, IndicesService indicesService, AllocationService allocationService, Version version, AliasValidator aliasValidator, - Set indexTemplateFilters, Environment env, NodeServicesProvider nodeServicesProvider) { + Set indexTemplateFilters, Environment env, NodeServicesProvider nodeServicesProvider, IndexScopedSettings indexScopedSettings) { super(settings); this.clusterService = clusterService; this.indicesService = indicesService; @@ -118,6 +120,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { this.aliasValidator = aliasValidator; this.env = env; this.nodeServicesProvider = nodeServicesProvider; + this.indexScopedSettings = indexScopedSettings; if (indexTemplateFilters.isEmpty()) { this.indexTemplateFilter = DEFAULT_INDEX_TEMPLATE_FILTER; @@ -174,6 +177,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { public void createIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener listener) { Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder(); updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); + 
indexScopedSettings.validate(updatedSettingsBuilder); request.settings(updatedSettingsBuilder.build()); clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", @@ -313,7 +317,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { // first, add the default mapping if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) { try { - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false, request.updateAllTypes()); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); } catch (Exception e) { removalReason = "failed on parsing default mapping on index creation"; throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, MapperService.DEFAULT_MAPPING, e.getMessage()); @@ -325,7 +329,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { } try { // apply the default here, its the first time we parse it - mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true, request.updateAllTypes()); + mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); } catch (Exception e) { removalReason = "failed on parsing mappings on index creation"; throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); @@ -460,16 +464,17 @@ public class MetaDataCreateIndexService extends AbstractComponent { } List getIndexSettingsValidationErrors(Settings settings) { - String customPath = settings.get(IndexMetaData.SETTING_DATA_PATH, null); + String customPath = 
IndexMetaData.INDEX_DATA_PATH_SETTING.get(settings); List validationErrors = new ArrayList<>(); - if (customPath != null && env.sharedDataFile() == null) { + if (Strings.isEmpty(customPath) == false && env.sharedDataFile() == null) { validationErrors.add("path.shared_data must be set in order to use custom data paths"); - } else if (customPath != null) { + } else if (Strings.isEmpty(customPath) == false) { Path resolvedPath = PathUtils.get(new Path[]{env.sharedDataFile()}, customPath); if (resolvedPath == null) { validationErrors.add("custom path [" + customPath + "] is not a sub-path of path.shared_data [" + env.sharedDataFile() + "]"); } } + //norelease - this can be removed? Integer number_of_primaries = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null); Integer number_of_replicas = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, null); if (number_of_primaries != null && number_of_primaries <= 0) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 32a66bfb764..1e9f968f7a6 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -104,12 +104,9 @@ public class MetaDataIndexAliasesService extends AbstractComponent { // temporarily create the index and add mappings so we can parse the filter try { indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); - if (indexMetaData.getMappings().containsKey(MapperService.DEFAULT_MAPPING)) { - indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.getMappings().get(MapperService.DEFAULT_MAPPING).source(), false, false); - } for (ObjectCursor cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMetaData = cursor.value; - 
indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), false, false); + indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false); } } catch (Exception e) { logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.getIndex()); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index 07e11c692b2..8bbd6f09d7e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -20,12 +20,15 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.misc.IndexMergeTool; import org.elasticsearch.Version; -import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.MapperService; @@ -33,6 +36,7 @@ import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.mapper.MapperRegistry; import java.util.Collections; +import java.util.Map; import java.util.Set; import static java.util.Collections.unmodifiableSet; @@ -49,11 +53,13 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; public class 
MetaDataIndexUpgradeService extends AbstractComponent { private final MapperRegistry mapperRegistry; + private final IndexScopedSettings indexScopedSettigns; @Inject - public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry) { + public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings) { super(settings); this.mapperRegistry = mapperRegistry; + this.indexScopedSettigns = indexScopedSettings; } /** @@ -66,22 +72,25 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { // Throws an exception if there are too-old segments: if (isUpgraded(indexMetaData)) { + assert indexMetaData == archiveBrokenIndexSettings(indexMetaData) : "all settings must have been upgraded before"; return indexMetaData; } checkSupportedVersion(indexMetaData); IndexMetaData newMetaData = indexMetaData; - newMetaData = addDefaultUnitsIfNeeded(newMetaData); + // we have to run this first otherwise in we try to create IndexSettings + // with broken settings and fail in checkMappingsCompatibility + newMetaData = archiveBrokenIndexSettings(newMetaData); + // only run the check with the upgraded settings!! checkMappingsCompatibility(newMetaData); - newMetaData = markAsUpgraded(newMetaData); - return newMetaData; + return markAsUpgraded(newMetaData); } /** * Checks if the index was already opened by this version of Elasticsearch and doesn't require any additional checks. */ - private boolean isUpgraded(IndexMetaData indexMetaData) { - return indexMetaData.getUpgradedVersion().onOrAfter(Version.V_3_0_0); + boolean isUpgraded(IndexMetaData indexMetaData) { + return indexMetaData.getUpgradedVersion().onOrAfter(Version.CURRENT); } /** @@ -113,103 +122,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { return false; } - /** All known byte-sized settings for an index. 
*/ - public static final Set INDEX_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet( - "index.merge.policy.floor_segment", - "index.merge.policy.max_merged_segment", - "index.merge.policy.max_merge_size", - "index.merge.policy.min_merge_size", - "index.shard.recovery.file_chunk_size", - "index.shard.recovery.translog_size", - "index.store.throttle.max_bytes_per_sec", - "index.translog.flush_threshold_size", - "index.translog.fs.buffer_size", - "index.version_map_size")); - - /** All known time settings for an index. */ - public static final Set INDEX_TIME_SETTINGS = unmodifiableSet(newHashSet( - "index.gateway.wait_for_mapping_update_post_recovery", - "index.shard.wait_for_mapping_update_post_recovery", - "index.gc_deletes", - "index.indexing.slowlog.threshold.index.debug", - "index.indexing.slowlog.threshold.index.info", - "index.indexing.slowlog.threshold.index.trace", - "index.indexing.slowlog.threshold.index.warn", - "index.refresh_interval", - "index.search.slowlog.threshold.fetch.debug", - "index.search.slowlog.threshold.fetch.info", - "index.search.slowlog.threshold.fetch.trace", - "index.search.slowlog.threshold.fetch.warn", - "index.search.slowlog.threshold.query.debug", - "index.search.slowlog.threshold.query.info", - "index.search.slowlog.threshold.query.trace", - "index.search.slowlog.threshold.query.warn", - "index.shadow.wait_for_initial_commit", - "index.store.stats_refresh_interval", - "index.translog.flush_threshold_period", - "index.translog.interval", - "index.translog.sync_interval", - "index.shard.inactive_time", - UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING)); - - /** - * Elasticsearch 2.0 requires units on byte/memory and time settings; this method adds the default unit to any such settings that are - * missing units. 
- */ - private IndexMetaData addDefaultUnitsIfNeeded(IndexMetaData indexMetaData) { - if (indexMetaData.getCreationVersion().before(Version.V_2_0_0_beta1)) { - // TODO: can we somehow only do this *once* for a pre-2.0 index? Maybe we could stuff a "fake marker setting" here? Seems hackish... - // Created lazily if we find any settings that are missing units: - Settings settings = indexMetaData.getSettings(); - Settings.Builder newSettings = null; - for(String byteSizeSetting : INDEX_BYTES_SIZE_SETTINGS) { - String value = settings.get(byteSizeSetting); - if (value != null) { - try { - Long.parseLong(value); - } catch (NumberFormatException nfe) { - continue; - } - // It's a naked number that previously would be interpreted as default unit (bytes); now we add it: - logger.warn("byte-sized index setting [{}] with value [{}] is missing units; assuming default units (b) but in future versions this will be a hard error", byteSizeSetting, value); - if (newSettings == null) { - newSettings = Settings.builder(); - newSettings.put(settings); - } - newSettings.put(byteSizeSetting, value + "b"); - } - } - for(String timeSetting : INDEX_TIME_SETTINGS) { - String value = settings.get(timeSetting); - if (value != null) { - try { - Long.parseLong(value); - } catch (NumberFormatException nfe) { - continue; - } - // It's a naked number that previously would be interpreted as default unit (ms); now we add it: - logger.warn("time index setting [{}] with value [{}] is missing units; assuming default units (ms) but in future versions this will be a hard error", timeSetting, value); - if (newSettings == null) { - newSettings = Settings.builder(); - newSettings.put(settings); - } - newSettings.put(timeSetting, value + "ms"); - } - } - if (newSettings != null) { - // At least one setting was changed: - return IndexMetaData.builder(indexMetaData) - .version(indexMetaData.getVersion()) - .settings(newSettings.build()) - .build(); - } - } - - // No changes: - return indexMetaData; - } - - 
/** * Checks the mappings for compatibility with the current version */ @@ -217,14 +129,14 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { try { // We cannot instantiate real analysis server at this point because the node might not have // been started yet. However, we don't really need real analyzers at this stage - so we can fake it - IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList()); + IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) { try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null)) { for (ObjectCursor cursor : indexMetaData.getMappings().values()) { MappingMetaData mappingMetaData = cursor.value; - mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), false, false); + mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false); } } } @@ -270,4 +182,39 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } } + private static final String ARCHIVED_SETTINGS_PREFIX = "archived."; + + IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) { + Settings settings = indexMetaData.getSettings(); + Settings.Builder builder = Settings.builder(); + boolean changed = false; + for (Map.Entry entry : settings.getAsMap().entrySet()) { + try { + Setting setting = indexScopedSettigns.get(entry.getKey()); + if (setting != null) { + setting.get(settings); + builder.put(entry.getKey(), entry.getValue()); + } else { + if (indexScopedSettigns.isPrivateSetting(entry.getKey()) || entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX)) { + builder.put(entry.getKey(), entry.getValue()); + } else { + changed = true; + 
 logger.warn("[{}] found unknown index setting: {} value: {} - archiving", indexMetaData.getIndex(), entry.getKey(), entry.getValue()); + // we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there + // but we want users to be aware that some of their settings are broken and they can research why and what they need to do to replace them. + builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue()); + } + } + } catch (IllegalArgumentException ex) { + changed = true; + logger.warn("[{}] found invalid index setting: {} value: {} - archiving",ex, indexMetaData.getIndex(), entry.getKey(), entry.getValue()); + // we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there + // but we want users to be aware that some of their settings are broken and they can research why and what they need to do to replace them. + builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue()); + } + } + + return changed ? 
IndexMetaData.builder(indexMetaData).settings(builder.build()).build() : indexMetaData; + } + } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java index 14f9f500c45..1d13fc2079e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataMappingService.java @@ -143,7 +143,7 @@ public class MetaDataMappingService extends AbstractComponent { removeIndex = true; for (ObjectCursor metaData : indexMetaData.getMappings().values()) { // don't apply the default mapping, it has been applied when the mapping was created - indexService.mapperService().merge(metaData.value.type(), metaData.value.source(), false, true); + indexService.mapperService().merge(metaData.value.type(), metaData.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, true); } } @@ -223,7 +223,7 @@ public class MetaDataMappingService extends AbstractComponent { IndexService indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, Collections.emptyList()); // add mappings for all types, we need them for cross-type validation for (ObjectCursor mapping : indexMetaData.getMappings().values()) { - indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), false, request.updateAllTypes()); + indexService.mapperService().merge(mapping.value.type(), mapping.value.source(), MapperService.MergeReason.MAPPING_RECOVERY, request.updateAllTypes()); } } } @@ -303,7 +303,7 @@ public class MetaDataMappingService extends AbstractComponent { if (existingMapper != null) { existingSource = existingMapper.mappingSource(); } - DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, mappingUpdateSource, true, request.updateAllTypes()); + DocumentMapper mergedMapper = indexService.mapperService().merge(mappingType, 
mappingUpdateSource, MapperService.MergeReason.MAPPING_UPDATE, request.updateAllTypes()); CompressedXContent updatedSource = mergedMapper.mappingSource(); if (existingSource != null) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 35c9c51143f..8e9dbc6b673 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -30,19 +30,20 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; +import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.settings.DynamicSettings; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.settings.IndexDynamicSettings; +import org.elasticsearch.index.IndexNotFoundException; import java.util.ArrayList; import java.util.HashMap; @@ -59,25 +60,21 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; */ public class MetaDataUpdateSettingsService extends AbstractComponent 
implements ClusterStateListener { - // the value we recognize in the "max" position to mean all the nodes - private static final String ALL_NODES_VALUE = "all"; - private final ClusterService clusterService; private final AllocationService allocationService; - private final DynamicSettings dynamicSettings; - private final IndexNameExpressionResolver indexNameExpressionResolver; + private final IndexScopedSettings indexScopedSettings; @Inject - public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, @IndexDynamicSettings DynamicSettings dynamicSettings, IndexNameExpressionResolver indexNameExpressionResolver) { + public MetaDataUpdateSettingsService(Settings settings, ClusterService clusterService, AllocationService allocationService, IndexScopedSettings indexScopedSettings, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings); this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; this.clusterService.add(this); this.allocationService = allocationService; - this.dynamicSettings = dynamicSettings; + this.indexScopedSettings = indexScopedSettings; } @Override @@ -90,69 +87,43 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements final int dataNodeCount = event.state().nodes().dataNodes().size(); Map> nrReplicasChanged = new HashMap<>(); - // we need to do this each time in case it was changed by update settings for (final IndexMetaData indexMetaData : event.state().metaData()) { - String autoExpandReplicas = indexMetaData.getSettings().get(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS); - if (autoExpandReplicas != null && Booleans.parseBoolean(autoExpandReplicas, true)) { // Booleans only work for false values, just as we want it here - try { - final int min; - final int max; + AutoExpandReplicas autoExpandReplicas = IndexMetaData.INDEX_AUTO_EXPAND_REPLICAS_SETTING.get(indexMetaData.getSettings()); + if 
 (autoExpandReplicas.isEnabled()) { + /* + * we have to expand the number of replicas for this index to at least min and at most max nodes here + * so we are bumping it up if we have to or reduce it depending on min/max and the number of datanodes. + * If we change the number of replicas we just let the shard allocator do its thing once we updated it + * since it goes through the index metadata to figure out if something needs to be done anyway. To do that + * we issue a cluster settings update command below and kick off a reroute. + */ + final int min = autoExpandReplicas.getMinReplicas(); + final int max = autoExpandReplicas.getMaxReplicas(dataNodeCount); + int numberOfReplicas = dataNodeCount - 1; + if (numberOfReplicas < min) { + numberOfReplicas = min; + } else if (numberOfReplicas > max) { + numberOfReplicas = max; + } + // same value, nothing to do there + if (numberOfReplicas == indexMetaData.getNumberOfReplicas()) { + continue; + } - final int dash = autoExpandReplicas.indexOf('-'); - if (-1 == dash) { - logger.warn("failed to set [{}] for index [{}], it should be dash delimited [{}]", - IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.getIndex(), autoExpandReplicas); - continue; - } - final String sMin = autoExpandReplicas.substring(0, dash); - try { - min = Integer.parseInt(sMin); - } catch (NumberFormatException e) { - logger.warn("failed to set [{}] for index [{}], minimum value is not a number [{}]", - e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.getIndex(), sMin); - continue; - } - String sMax = autoExpandReplicas.substring(dash + 1); - if (sMax.equals(ALL_NODES_VALUE)) { - max = dataNodeCount - 1; - } else { - try { - max = Integer.parseInt(sMax); - } catch (NumberFormatException e) { - logger.warn("failed to set [{}] for index [{}], maximum value is neither [{}] nor a number [{}]", - e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.getIndex(), ALL_NODES_VALUE, sMax); - continue; - } + if (numberOfReplicas >= 
min && numberOfReplicas <= max) { + + if (!nrReplicasChanged.containsKey(numberOfReplicas)) { + nrReplicasChanged.put(numberOfReplicas, new ArrayList<>()); } - int numberOfReplicas = dataNodeCount - 1; - if (numberOfReplicas < min) { - numberOfReplicas = min; - } else if (numberOfReplicas > max) { - numberOfReplicas = max; - } - - // same value, nothing to do there - if (numberOfReplicas == indexMetaData.getNumberOfReplicas()) { - continue; - } - - if (numberOfReplicas >= min && numberOfReplicas <= max) { - - if (!nrReplicasChanged.containsKey(numberOfReplicas)) { - nrReplicasChanged.put(numberOfReplicas, new ArrayList()); - } - - nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex()); - } - } catch (Exception e) { - logger.warn("[{}] failed to parse auto expand replicas", e, indexMetaData.getIndex()); + nrReplicasChanged.get(numberOfReplicas).add(indexMetaData.getIndex()); } } } if (nrReplicasChanged.size() > 0) { + // update settings and kick of a reroute (implicit) for them to take effect for (final Integer fNumberOfReplicas : nrReplicasChanged.keySet()) { Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, fNumberOfReplicas).build(); final List indices = nrReplicasChanged.get(fNumberOfReplicas); @@ -182,42 +153,30 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } public void updateSettings(final UpdateSettingsClusterStateUpdateRequest request, final ActionListener listener) { - Settings.Builder updatedSettingsBuilder = Settings.settingsBuilder(); - updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); + final Settings normalizedSettings = Settings.settingsBuilder().put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX).build(); + Settings.Builder settingsForClosedIndices = Settings.builder(); + Settings.Builder settingsForOpenIndices = Settings.builder(); + Settings.Builder skipppedSettings = 
Settings.builder(); + + indexScopedSettings.validate(normalizedSettings); // never allow to change the number of shards - for (String key : updatedSettingsBuilder.internalMap().keySet()) { - if (key.equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) { + for (Map.Entry entry : normalizedSettings.getAsMap().entrySet()) { + if (entry.getKey().equals(IndexMetaData.SETTING_NUMBER_OF_SHARDS)) { listener.onFailure(new IllegalArgumentException("can't change the number of shards for an index")); return; } - } - - final Settings closeSettings = updatedSettingsBuilder.build(); - - final Set removedSettings = new HashSet<>(); - final Set errors = new HashSet<>(); - for (Map.Entry setting : updatedSettingsBuilder.internalMap().entrySet()) { - if (!dynamicSettings.hasDynamicSetting(setting.getKey())) { - removedSettings.add(setting.getKey()); + Setting setting = indexScopedSettings.get(entry.getKey()); + assert setting != null; // we already validated the normalized settings + settingsForClosedIndices.put(entry.getKey(), entry.getValue()); + if (setting.isDynamic()) { + settingsForOpenIndices.put(entry.getKey(), entry.getValue()); } else { - String error = dynamicSettings.validateDynamicSetting(setting.getKey(), setting.getValue(), clusterService.state()); - if (error != null) { - errors.add("[" + setting.getKey() + "] - " + error); - } + skipppedSettings.put(entry.getKey(), entry.getValue()); } } - - if (!errors.isEmpty()) { - listener.onFailure(new IllegalArgumentException("can't process the settings: " + errors.toString())); - return; - } - - if (!removedSettings.isEmpty()) { - for (String removedSetting : removedSettings) { - updatedSettingsBuilder.remove(removedSetting); - } - } - final Settings openSettings = updatedSettingsBuilder.build(); + final Settings skippedSettigns = skipppedSettings.build(); + final Settings closedSettings = settingsForClosedIndices.build(); + final Settings openSettings = settingsForOpenIndices.build(); 
clusterService.submitStateUpdateTask("update-settings", new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { @@ -245,16 +204,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } } - if (closeIndices.size() > 0 && closeSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) { + if (closeIndices.size() > 0 && closedSettings.get(IndexMetaData.SETTING_NUMBER_OF_REPLICAS) != null) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Can't update [%s] on closed indices [%s] - can leave index in an unopenable state", IndexMetaData.SETTING_NUMBER_OF_REPLICAS, closeIndices )); } - if (!removedSettings.isEmpty() && !openIndices.isEmpty()) { + if (!skippedSettigns.getAsMap().isEmpty() && !openIndices.isEmpty()) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Can't update non dynamic settings[%s] for open indices [%s]", - removedSettings, + skippedSettigns.getAsMap().keySet(), openIndices )); } @@ -267,57 +226,37 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements } ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - Boolean updatedReadOnly = openSettings.getAsBoolean(IndexMetaData.SETTING_READ_ONLY, null); - if (updatedReadOnly != null) { - for (String index : actualIndices) { - if (updatedReadOnly) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_READ_ONLY_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_READ_ONLY_BLOCK); - } - } - } - Boolean updateMetaDataBlock = openSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_METADATA, null); - if (updateMetaDataBlock != null) { - for (String index : actualIndices) { - if (updateMetaDataBlock) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_METADATA_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_METADATA_BLOCK); - } - } - } - - Boolean updateWriteBlock = openSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_WRITE, null); - 
if (updateWriteBlock != null) { - for (String index : actualIndices) { - if (updateWriteBlock) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_WRITE_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_WRITE_BLOCK); - } - } - } - - Boolean updateReadBlock = openSettings.getAsBoolean(IndexMetaData.SETTING_BLOCKS_READ, null); - if (updateReadBlock != null) { - for (String index : actualIndices) { - if (updateReadBlock) { - blocks.addIndexBlock(index, IndexMetaData.INDEX_READ_BLOCK); - } else { - blocks.removeIndexBlock(index, IndexMetaData.INDEX_READ_BLOCK); - } - } - } + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_ONLY_BLOCK, IndexMetaData.INDEX_READ_ONLY_SETTING, openSettings); + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_METADATA_BLOCK, IndexMetaData.INDEX_BLOCKS_METADATA_SETTING, openSettings); + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_WRITE_BLOCK, IndexMetaData.INDEX_BLOCKS_WRITE_SETTING, openSettings); + maybeUpdateClusterBlock(actualIndices, blocks, IndexMetaData.INDEX_READ_BLOCK, IndexMetaData.INDEX_BLOCKS_READ_SETTING, openSettings); if (!openIndices.isEmpty()) { - String[] indices = openIndices.toArray(new String[openIndices.size()]); - metaDataBuilder.updateSettings(openSettings, indices); + for (String index : openIndices) { + IndexMetaData indexMetaData = metaDataBuilder.get(index); + if (indexMetaData == null) { + throw new IndexNotFoundException(index); + } + Settings.Builder updates = Settings.builder(); + Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings()); + if (indexScopedSettings.updateDynamicSettings(openSettings, indexSettings, updates, index)) { + metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); + } + } } if (!closeIndices.isEmpty()) { - String[] indices = closeIndices.toArray(new String[closeIndices.size()]); - metaDataBuilder.updateSettings(closeSettings, indices); + for 
(String index : closeIndices) { + IndexMetaData indexMetaData = metaDataBuilder.get(index); + if (indexMetaData == null) { + throw new IndexNotFoundException(index); + } + Settings.Builder updates = Settings.builder(); + Settings.Builder indexSettings = Settings.builder().put(indexMetaData.getSettings()); + if (indexScopedSettings.updateSettings(closedSettings, indexSettings, updates, index)) { + metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); + } + } } @@ -326,12 +265,34 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements // now, reroute in case things change that require it (like number of replicas) RoutingAllocation.Result routingResult = allocationService.reroute(updatedState, "settings update"); updatedState = ClusterState.builder(updatedState).routingResult(routingResult).build(); - + for (String index : openIndices) { + indexScopedSettings.dryRun(updatedState.metaData().index(index).getSettings()); + } + for (String index : closeIndices) { + indexScopedSettings.dryRun(updatedState.metaData().index(index).getSettings()); + } return updatedState; } }); } + /** + * Updates the cluster block only iff the setting exists in the given settings + */ + private static void maybeUpdateClusterBlock(String[] actualIndices, ClusterBlocks.Builder blocks, ClusterBlock block, Setting setting, Settings openSettings) { + if (setting.exists(openSettings)) { + final boolean updateReadBlock = setting.get(openSettings); + for (String index : actualIndices) { + if (updateReadBlock) { + blocks.addIndexBlock(index, block); + } else { + blocks.removeIndexBlock(index, block); + } + } + } + } + + public void upgradeIndexSettings(final UpgradeSettingsClusterStateUpdateRequest request, final ActionListener listener) { diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 0bb64220f1f..d07d3c334ac 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -319,7 +319,7 @@ public class DiscoveryNodes extends AbstractDiffable implements throw new IllegalArgumentException("resolved [" + node + "] into [" + resolvedNodeIds.length + "] nodes, where expected to be resolved to a single node"); } if (resolvedNodeIds.length == 0) { - throw new IllegalArgumentException("failed to resolve [" + node + " ], no matching nodes"); + throw new IllegalArgumentException("failed to resolve [" + node + "], no matching nodes"); } return nodes.get(resolvedNodeIds[0]); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 0fb7513f73f..4bf196d07d0 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -114,6 +114,16 @@ public class RoutingTable implements Iterable, Diffable { public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime"); - - public static final String INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = "index.unassigned.node_left.delayed_timeout"; private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMinutes(1); + public static final Setting INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = Setting.timeSetting("index.unassigned.node_left.delayed_timeout", DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, true, Setting.Scope.INDEX); + /** * Reason why the shard is in unassigned state. *

@@ -215,7 +216,7 @@ public class UnassignedInfo implements ToXContent, Writeable { if (reason != Reason.NODE_LEFT) { return 0; } - TimeValue delayTimeout = indexSettings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, settings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, DEFAULT_DELAYED_NODE_LEFT_TIMEOUT)); + TimeValue delayTimeout = INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(indexSettings, settings); return Math.max(0l, delayTimeout.nanos()); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java index a9ce43c5f76..65a7bd6971a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/ShardsAllocators.java @@ -32,7 +32,7 @@ import org.elasticsearch.gateway.GatewayAllocator; /** * The {@link ShardsAllocator} class offers methods for allocating shard within a cluster. * These methods include moving shards and re-balancing the cluster. It also allows management - * of shards by their state. + * of shards by their state. */ public class ShardsAllocators extends AbstractComponent implements ShardsAllocator { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java new file mode 100644 index 00000000000..31fc51a8979 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java @@ -0,0 +1,240 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.StreamableReader; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; +import java.util.function.Consumer; + +/** + * Abstract base class for allocating an unassigned shard to a node + */ +public abstract class AbstractAllocateAllocationCommand implements AllocationCommand, ToXContent { + + private static final String INDEX_KEY = "index"; + private static final String SHARD_KEY = "shard"; + 
private static final String NODE_KEY = "node"; + + protected static ObjectParser createAllocateParser(String command) { + ObjectParser parser = new ObjectParser<>(command); + parser.declareString(Builder::setIndex, new ParseField(INDEX_KEY)); + parser.declareInt(Builder::setShard, new ParseField(SHARD_KEY)); + parser.declareString(Builder::setNode, new ParseField(NODE_KEY)); + return parser; + } + + protected static abstract class Builder implements StreamableReader> { + protected String index; + protected int shard = -1; + protected String node; + + public void setIndex(String index) { + this.index = index; + } + + public void setShard(int shard) { + this.shard = shard; + } + + public void setNode(String node) { + this.node = node; + } + + @Override + public Builder readFrom(StreamInput in) throws IOException { + index = in.readString(); + shard = in.readVInt(); + node = in.readString(); + return this; + } + + public abstract Builder parse(XContentParser parser) throws IOException; + + public abstract T build(); + + protected void validate() { + if (index == null) { + throw new IllegalArgumentException("Argument [index] must be defined"); + } + if (shard < 0) { + throw new IllegalArgumentException("Argument [shard] must be defined and non-negative"); + } + if (node == null) { + throw new IllegalArgumentException("Argument [node] must be defined"); + } + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field(INDEX_KEY, shardId().index().name()); + builder.field(SHARD_KEY, shardId().id()); + builder.field(NODE_KEY, node()); + return builder; + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeString(shardId.getIndex()); + out.writeVInt(shardId.getId()); + out.writeString(node); + } + + public static abstract class Factory implements AllocationCommand.Factory { + + protected abstract Builder newBuilder(); + + @Override + public T readFrom(StreamInput in) 
throws IOException { + return newBuilder().readFrom(in).build(); + } + + @Override + public void writeTo(T command, StreamOutput out) throws IOException { + command.writeTo(out); + } + + @Override + public T fromXContent(XContentParser parser) throws IOException { + return newBuilder().parse(parser).build(); + } + + @Override + public void toXContent(T command, XContentBuilder builder, ToXContent.Params params, String objectName) throws IOException { + if (objectName == null) { + builder.startObject(); + } else { + builder.startObject(objectName); + } + builder.endObject(); + } + } + + protected final ShardId shardId; + protected final String node; + + protected AbstractAllocateAllocationCommand(ShardId shardId, String node) { + this.shardId = shardId; + this.node = node; + } + + /** + * Get the shard id + * + * @return id of the shard + */ + public ShardId shardId() { + return this.shardId; + } + + /** + * Get the id of the node + * + * @return id of the node + */ + public String node() { + return this.node; + } + + /** + * Handle case where a disco node cannot be found in the routing table. Usually means that it's not a data node. 
+ */ + protected RerouteExplanation explainOrThrowMissingRoutingNode(RoutingAllocation allocation, boolean explain, DiscoveryNode discoNode) { + if (!discoNode.dataNode()) { + return explainOrThrowRejectedCommand(explain, allocation, "allocation can only be done on data nodes, not [" + node + "]"); + } else { + return explainOrThrowRejectedCommand(explain, allocation, "could not find [" + node + "] among the routing nodes"); + } + } + + /** + * Utility method for rejecting the current allocation command based on provided reason + */ + protected RerouteExplanation explainOrThrowRejectedCommand(boolean explain, RoutingAllocation allocation, String reason) { + if (explain) { + return new RerouteExplanation(this, allocation.decision(Decision.NO, name() + " (allocation command)", reason)); + } + throw new IllegalArgumentException("[" + name() + "] " + reason); + } + + /** + * Utility method for rejecting the current allocation command based on provided exception + */ + protected RerouteExplanation explainOrThrowRejectedCommand(boolean explain, RoutingAllocation allocation, RuntimeException rte) { + if (explain) { + return new RerouteExplanation(this, allocation.decision(Decision.NO, name() + " (allocation command)", rte.getMessage())); + } + throw rte; + } + + /** + * Initializes an unassigned shard on a node and removes it from the unassigned + * + * @param allocation the allocation + * @param routingNodes the routing nodes + * @param routingNode the node to initialize it to + * @param shardRouting the shard routing that is to be matched in unassigned shards + */ + protected void initializeUnassignedShard(RoutingAllocation allocation, RoutingNodes routingNodes, RoutingNode routingNode, ShardRouting shardRouting) { + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, null); + } + + /** + * Initializes an unassigned shard on a node and removes it from the unassigned + * + * @param allocation the allocation + * @param routingNodes the routing 
nodes + * @param routingNode the node to initialize it to + * @param shardRouting the shard routing that is to be matched in unassigned shards + * @param shardRoutingChanges changes to apply for shard routing in unassigned shards before initialization + */ + protected void initializeUnassignedShard(RoutingAllocation allocation, RoutingNodes routingNodes, RoutingNode routingNode, + ShardRouting shardRouting, @Nullable Consumer shardRoutingChanges) { + for (RoutingNodes.UnassignedShards.UnassignedIterator it = routingNodes.unassigned().iterator(); it.hasNext(); ) { + ShardRouting unassigned = it.next(); + if (!unassigned.equalsIgnoringMetaData(shardRouting)) { + continue; + } + if (shardRoutingChanges != null) { + shardRoutingChanges.accept(unassigned); + } + it.initialize(routingNode.nodeId(), unassigned.version(), + allocation.clusterInfo().getShardSize(unassigned, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); + return; + } + assert false : "shard to initialize not found in list of unassigned shards"; + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateAllocationCommand.java deleted file mode 100644 index 5646d308dda..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateAllocationCommand.java +++ /dev/null @@ -1,240 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.routing.allocation.command; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.RoutingNodes; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.UnassignedInfo; -import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; -import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; -import org.elasticsearch.cluster.routing.allocation.decider.Decision; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; - -/** - * Allocates an unassigned shard to a specific node. Note, primary allocation will "force" - * allocation which might mean one will loose data if using local gateway..., use with care - * with the allowPrimary flag. 
- */ -public class AllocateAllocationCommand implements AllocationCommand { - - public static final String NAME = "allocate"; - - public static class Factory implements AllocationCommand.Factory { - - @Override - public AllocateAllocationCommand readFrom(StreamInput in) throws IOException { - return new AllocateAllocationCommand(ShardId.readShardId(in), in.readString(), in.readBoolean()); - } - - @Override - public void writeTo(AllocateAllocationCommand command, StreamOutput out) throws IOException { - command.shardId().writeTo(out); - out.writeString(command.node()); - out.writeBoolean(command.allowPrimary()); - } - - @Override - public AllocateAllocationCommand fromXContent(XContentParser parser) throws IOException { - String index = null; - int shardId = -1; - String nodeId = null; - boolean allowPrimary = false; - - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("index".equals(currentFieldName)) { - index = parser.text(); - } else if ("shard".equals(currentFieldName)) { - shardId = parser.intValue(); - } else if ("node".equals(currentFieldName)) { - nodeId = parser.text(); - } else if ("allow_primary".equals(currentFieldName) || "allowPrimary".equals(currentFieldName)) { - allowPrimary = parser.booleanValue(); - } else { - throw new ElasticsearchParseException("[{}] command does not support field [{}]", NAME, currentFieldName); - } - } else { - throw new ElasticsearchParseException("[{}] command does not support complex json tokens [{}]", NAME, token); - } - } - if (index == null) { - throw new ElasticsearchParseException("[{}] command missing the index parameter", NAME); - } - if (shardId == -1) { - throw new ElasticsearchParseException("[{}] command missing the shard parameter", NAME); - } - if (nodeId == null) { - throw new 
ElasticsearchParseException("[{}] command missing the node parameter", NAME); - } - return new AllocateAllocationCommand(new ShardId(index, shardId), nodeId, allowPrimary); - } - - @Override - public void toXContent(AllocateAllocationCommand command, XContentBuilder builder, ToXContent.Params params, String objectName) throws IOException { - if (objectName == null) { - builder.startObject(); - } else { - builder.startObject(objectName); - } - builder.field("index", command.shardId().index().name()); - builder.field("shard", command.shardId().id()); - builder.field("node", command.node()); - builder.field("allow_primary", command.allowPrimary()); - builder.endObject(); - } - } - - private final ShardId shardId; - private final String node; - private final boolean allowPrimary; - - /** - * Create a new {@link AllocateAllocationCommand} - * - * @param shardId {@link ShardId} of the shrad to assign - * @param node Node to assign the shard to - * @param allowPrimary should the node be allow to allocate the shard as primary - */ - public AllocateAllocationCommand(ShardId shardId, String node, boolean allowPrimary) { - this.shardId = shardId; - this.node = node; - this.allowPrimary = allowPrimary; - } - - @Override - public String name() { - return NAME; - } - - /** - * Get the shards id - * - * @return id of the shard - */ - public ShardId shardId() { - return this.shardId; - } - - /** - * Get the id of the Node - * - * @return id of the Node - */ - public String node() { - return this.node; - } - - /** - * Determine if primary allocation is allowed - * - * @return true if primary allocation is allowed. 
Otherwise false - */ - public boolean allowPrimary() { - return this.allowPrimary; - } - - @Override - public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { - final DiscoveryNode discoNode = allocation.nodes().resolveNode(node); - final RoutingNodes routingNodes = allocation.routingNodes(); - - ShardRouting shardRouting = null; - for (ShardRouting routing : routingNodes.unassigned()) { - if (routing.shardId().equals(shardId)) { - // prefer primaries first to allocate - if (shardRouting == null || routing.primary()) { - shardRouting = routing; - } - } - } - - if (shardRouting == null) { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "failed to find " + shardId + " on the list of unassigned shards")); - } - throw new IllegalArgumentException("[allocate] failed to find " + shardId + " on the list of unassigned shards"); - } - - if (shardRouting.primary() && !allowPrimary) { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "trying to allocate a primary shard " + shardId + ", which is disabled")); - } - throw new IllegalArgumentException("[allocate] trying to allocate a primary shard " + shardId + ", which is disabled"); - } - - RoutingNode routingNode = routingNodes.node(discoNode.id()); - if (routingNode == null) { - if (!discoNode.dataNode()) { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "Allocation can only be done on data nodes, not [" + node + "]")); - } - throw new IllegalArgumentException("Allocation can only be done on data nodes, not [" + node + "]"); - } else { - if (explain) { - return new RerouteExplanation(this, allocation.decision(Decision.NO, "allocate_allocation_command", - "Could not find [" + node + "] among the routing nodes")); - } - throw new IllegalStateException("Could not find [" + node + "] among 
the routing nodes"); - } - } - - Decision decision = allocation.deciders().canAllocate(shardRouting, routingNode, allocation); - if (decision.type() == Decision.Type.NO) { - if (explain) { - return new RerouteExplanation(this, decision); - } - throw new IllegalArgumentException("[allocate] allocation of " + shardId + " on node " + discoNode + " is not allowed, reason: " + decision); - } - // go over and remove it from the unassigned - for (RoutingNodes.UnassignedShards.UnassignedIterator it = routingNodes.unassigned().iterator(); it.hasNext(); ) { - ShardRouting unassigned = it.next(); - if (unassigned != shardRouting) { - continue; - } - // if we force allocation of a primary, we need to move the unassigned info back to treat it as if - // it was index creation - if (unassigned.primary() && unassigned.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED) { - unassigned.updateUnassignedInfo(new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, - "force allocation from previous reason " + unassigned.unassignedInfo().getReason() + ", " + unassigned.unassignedInfo().getMessage(), - unassigned.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis())); - } - it.initialize(routingNode.nodeId(), unassigned.version(), allocation.clusterInfo().getShardSize(unassigned, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE)); - break; - } - return new RerouteExplanation(this, decision); - } -} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java new file mode 100644 index 00000000000..f607755bca1 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; + +import java.io.IOException; + +/** + * Allocates an unassigned empty primary shard to a specific node. Use with extreme care as this will result in data loss. + * Allocation deciders are ignored. 
+ */ +public class AllocateEmptyPrimaryAllocationCommand extends BasePrimaryAllocationCommand { + public static final String NAME = "allocate_empty_primary"; + + private static final ObjectParser EMPTY_PRIMARY_PARSER = BasePrimaryAllocationCommand.createAllocatePrimaryParser(NAME); + + /** + * Creates a new {@link AllocateEmptyPrimaryAllocationCommand} + * + * @param shardId {@link ShardId} of the shard to assign + * @param node node id of the node to assign the shard to + * @param acceptDataLoss whether the user agrees to data loss + */ + public AllocateEmptyPrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { + super(shardId, node, acceptDataLoss); + } + + @Override + public String name() { + return NAME; + } + + public static class Builder extends BasePrimaryAllocationCommand.Builder { + + @Override + public Builder parse(XContentParser parser) throws IOException { + return EMPTY_PRIMARY_PARSER.parse(parser, this); + } + + @Override + public AllocateEmptyPrimaryAllocationCommand build() { + validate(); + return new AllocateEmptyPrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss); + } + } + + public static class Factory extends AbstractAllocateAllocationCommand.Factory { + + @Override + protected Builder newBuilder() { + return new Builder(); + } + } + + @Override + public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { + final DiscoveryNode discoNode; + try { + discoNode = allocation.nodes().resolveNode(node); + } catch (IllegalArgumentException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + final RoutingNodes routingNodes = allocation.routingNodes(); + RoutingNode routingNode = routingNodes.node(discoNode.id()); + if (routingNode == null) { + return explainOrThrowMissingRoutingNode(allocation, explain, discoNode); + } + + final ShardRouting shardRouting; + try { + shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + } catch 
(IndexNotFoundException | ShardNotFoundException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + if (shardRouting.unassigned() == false) { + return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned"); + } + + if (shardRouting.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED && acceptDataLoss == false) { + return explainOrThrowRejectedCommand(explain, allocation, + "allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); + } + + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, + shr -> { + if (shr.unassignedInfo().getReason() != UnassignedInfo.Reason.INDEX_CREATED) { + // we need to move the unassigned info back to treat it as if it was index creation + shr.updateUnassignedInfo(new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, + "force empty allocation from previous reason " + shardRouting.unassignedInfo().getReason() + ", " + shardRouting.unassignedInfo().getMessage(), + shardRouting.unassignedInfo().getFailure(), System.nanoTime(), System.currentTimeMillis())); + } + }); + return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java new file mode 100644 index 00000000000..f9d443a6618 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateReplicaAllocationCommand.java @@ -0,0 +1,131 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; + +import java.io.IOException; +import java.util.List; + +/** + * Allocates an unassigned replica shard to a specific node. Checks if allocation deciders allow allocation. 
+ */ +public class AllocateReplicaAllocationCommand extends AbstractAllocateAllocationCommand { + public static final String NAME = "allocate_replica"; + + private static final ObjectParser REPLICA_PARSER = createAllocateParser(NAME); + + /** + * Creates a new {@link AllocateReplicaAllocationCommand} + * + * @param shardId {@link ShardId} of the shard to assign + * @param node node id of the node to assign the shard to + */ + public AllocateReplicaAllocationCommand(ShardId shardId, String node) { + super(shardId, node); + } + + @Override + public String name() { + return NAME; + } + + protected static class Builder extends AbstractAllocateAllocationCommand.Builder { + + @Override + public Builder parse(XContentParser parser) throws IOException { + return REPLICA_PARSER.parse(parser, this); + } + + @Override + public AllocateReplicaAllocationCommand build() { + validate(); + return new AllocateReplicaAllocationCommand(new ShardId(index, shard), node); + } + } + + public static class Factory extends AbstractAllocateAllocationCommand.Factory { + @Override + protected Builder newBuilder() { + return new Builder(); + } + } + + @Override + public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { + final DiscoveryNode discoNode; + try { + discoNode = allocation.nodes().resolveNode(node); + } catch (IllegalArgumentException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + final RoutingNodes routingNodes = allocation.routingNodes(); + RoutingNode routingNode = routingNodes.node(discoNode.id()); + if (routingNode == null) { + return explainOrThrowMissingRoutingNode(allocation, explain, discoNode); + } + + final ShardRouting primaryShardRouting; + try { + primaryShardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + } catch (IndexNotFoundException | ShardNotFoundException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + if (primaryShardRouting.unassigned()) { + return 
explainOrThrowRejectedCommand(explain, allocation, + "trying to allocate a replica shard " + shardId + ", while corresponding primary shard is still unassigned"); + } + + List replicaShardRoutings = allocation.routingTable().shardRoutingTable(shardId).replicaShardsWithState(ShardRoutingState.UNASSIGNED); + ShardRouting shardRouting; + if (replicaShardRoutings.isEmpty()) { + return explainOrThrowRejectedCommand(explain, allocation, + "all copies of " + shardId +" are already assigned. Use the move allocation command instead"); + } else { + shardRouting = replicaShardRoutings.get(0); + } + + Decision decision = allocation.deciders().canAllocate(shardRouting, routingNode, allocation); + if (decision.type() == Decision.Type.NO) { + // don't use explainOrThrowRejectedCommand to keep the original "NO" decision + if (explain) { + return new RerouteExplanation(this, decision); + } + throw new IllegalArgumentException("[" + name() + "] allocation of " + shardId + " on node " + discoNode + " is not allowed, reason: " + decision); + } + + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); + return new RerouteExplanation(this, decision); + } + + +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java new file mode 100644 index 00000000000..22cedfc6aa3 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java @@ -0,0 +1,124 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; + +import java.io.IOException; + +/** + * Allocates an unassigned stale primary shard to a specific node. Use with extreme care as this will result in data loss. + * Allocation deciders are ignored. 
+ */ +public class AllocateStalePrimaryAllocationCommand extends BasePrimaryAllocationCommand { + public static final String NAME = "allocate_stale_primary"; + + private static final ObjectParser STALE_PRIMARY_PARSER = BasePrimaryAllocationCommand.createAllocatePrimaryParser(NAME); + + /** + * Creates a new {@link AllocateStalePrimaryAllocationCommand} + * + * @param shardId {@link ShardId} of the shard to assign + * @param node node id of the node to assign the shard to + * @param acceptDataLoss whether the user agrees to data loss + */ + public AllocateStalePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { + super(shardId, node, acceptDataLoss); + } + + @Override + public String name() { + return NAME; + } + + public static class Builder extends BasePrimaryAllocationCommand.Builder { + + @Override + public Builder parse(XContentParser parser) throws IOException { + return STALE_PRIMARY_PARSER.parse(parser, this); + } + + @Override + public AllocateStalePrimaryAllocationCommand build() { + validate(); + return new AllocateStalePrimaryAllocationCommand(new ShardId(index, shard), node, acceptDataLoss); + } + } + + public static class Factory extends AbstractAllocateAllocationCommand.Factory { + + @Override + protected Builder newBuilder() { + return new Builder(); + } + } + + @Override + public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) { + final DiscoveryNode discoNode; + try { + discoNode = allocation.nodes().resolveNode(node); + } catch (IllegalArgumentException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + final RoutingNodes routingNodes = allocation.routingNodes(); + RoutingNode routingNode = routingNodes.node(discoNode.id()); + if (routingNode == null) { + return explainOrThrowMissingRoutingNode(allocation, explain, discoNode); + } + + final ShardRouting shardRouting; + try { + shardRouting = allocation.routingTable().shardRoutingTable(shardId).primaryShard(); + } catch 
(IndexNotFoundException | ShardNotFoundException e) { + return explainOrThrowRejectedCommand(explain, allocation, e); + } + if (shardRouting.unassigned() == false) { + return explainOrThrowRejectedCommand(explain, allocation, "primary " + shardId + " is already assigned"); + } + + if (acceptDataLoss == false) { + return explainOrThrowRejectedCommand(explain, allocation, + "allocating a stale primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true"); + } + + final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); + if (shardRouting.allocatedPostIndexCreate(indexMetaData) == false) { + return explainOrThrowRejectedCommand(explain, allocation, + "trying to allocate an existing primary shard " + shardId + ", while no such shard has ever been active"); + } + + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); + return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); + } + +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java index 3e17ce68584..3674f56a949 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocationCommands.java @@ -67,7 +67,9 @@ public class AllocationCommands { } static { - registerFactory(AllocateAllocationCommand.NAME, new AllocateAllocationCommand.Factory()); + registerFactory(AllocateEmptyPrimaryAllocationCommand.NAME, new AllocateEmptyPrimaryAllocationCommand.Factory()); + registerFactory(AllocateStalePrimaryAllocationCommand.NAME, new AllocateStalePrimaryAllocationCommand.Factory()); + registerFactory(AllocateReplicaAllocationCommand.NAME, new 
AllocateReplicaAllocationCommand.Factory()); registerFactory(CancelAllocationCommand.NAME, new CancelAllocationCommand.Factory()); registerFactory(MoveAllocationCommand.NAME, new MoveAllocationCommand.Factory()); } @@ -76,7 +78,7 @@ public class AllocationCommands { /** * Creates a new set of {@link AllocationCommands} - * + * * @param commands {@link AllocationCommand}s that are wrapped by this instance */ public AllocationCommands(AllocationCommand... commands) { @@ -122,7 +124,7 @@ public class AllocationCommands { * Reads a {@link AllocationCommands} from a {@link StreamInput} * @param in {@link StreamInput} to read from * @return {@link AllocationCommands} read - * + * * @throws IOException if something happens during read */ public static AllocationCommands readFrom(StreamInput in) throws IOException { @@ -137,7 +139,7 @@ public class AllocationCommands { /** * Writes {@link AllocationCommands} to a {@link StreamOutput} - * + * * @param commands Commands to write * @param out {@link StreamOutput} to write the commands to * @throws IOException if something happens during write @@ -149,7 +151,7 @@ public class AllocationCommands { lookupFactorySafe(command.name()).writeTo(command, out); } } - + /** * Reads {@link AllocationCommands} from a {@link XContentParser} *

@@ -161,7 +163,7 @@ public class AllocationCommands {
      * 
* @param parser {@link XContentParser} to read the commands from * @return {@link AllocationCommands} read - * @throws IOException if something bad happens while reading the stream + * @throws IOException if something bad happens while reading the stream */ public static AllocationCommands fromXContent(XContentParser parser) throws IOException { AllocationCommands commands = new AllocationCommands(); @@ -203,10 +205,10 @@ public class AllocationCommands { } return commands; } - + /** * Writes {@link AllocationCommands} to a {@link XContentBuilder} - * + * * @param commands {@link AllocationCommands} to write * @param builder {@link XContentBuilder} to use * @param params Parameters to use for building diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java new file mode 100644 index 00000000000..35c1711d646 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/BasePrimaryAllocationCommand.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.routing.allocation.command; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; + +/** + * Abstract base class for allocating an unassigned primary shard to a node + */ +public abstract class BasePrimaryAllocationCommand extends AbstractAllocateAllocationCommand { + + private static final String ACCEPT_DATA_LOSS_KEY = "accept_data_loss"; + + protected static ObjectParser createAllocatePrimaryParser(String command) { + ObjectParser parser = AbstractAllocateAllocationCommand.createAllocateParser(command); + parser.declareBoolean(Builder::setAcceptDataLoss, new ParseField(ACCEPT_DATA_LOSS_KEY)); + return parser; + } + + protected final boolean acceptDataLoss; + + protected BasePrimaryAllocationCommand(ShardId shardId, String node, boolean acceptDataLoss) { + super(shardId, node); + this.acceptDataLoss = acceptDataLoss; + } + + /** + * The operation only executes if the user explicitly agrees to possible data loss + * + * @return whether data loss is acceptable + */ + public boolean acceptDataLoss() { + return acceptDataLoss; + } + + protected static abstract class Builder extends AbstractAllocateAllocationCommand.Builder { + protected boolean acceptDataLoss; + + public void setAcceptDataLoss(boolean acceptDataLoss) { + this.acceptDataLoss = acceptDataLoss; + } + + @Override + public Builder readFrom(StreamInput in) throws IOException { + super.readFrom(in); + acceptDataLoss = in.readBoolean(); + return this; + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + super.toXContent(builder, params); + 
builder.field(ACCEPT_DATA_LOSS_KEY, acceptDataLoss); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(acceptDataLoss); + } +} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index 7554fa4c46f..c485cb3eab5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -125,7 +125,7 @@ public class CancelAllocationCommand implements AllocationCommand { /** * Creates a new {@link CancelAllocationCommand} - * + * * @param shardId id of the shard which allocation should be canceled * @param node id of the node that manages the shard which allocation should be canceled */ diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index a31d36db349..3c2e649387a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -32,7 +32,7 @@ import java.util.Locale; /** * This allocation decider allows shard allocations / rebalancing via the cluster wide settings {@link #CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / - * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE} / {@link #INDEX_ROUTING_REBALANCE_ENABLE}. + * {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} and the per index setting {@link #INDEX_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link #INDEX_ROUTING_REBALANCE_ENABLE_SETTING}. 
* The per index settings overrides the cluster wide setting. * *

@@ -61,10 +61,10 @@ public class EnableAllocationDecider extends AllocationDecider { public static final String NAME = "enable"; public static final Setting CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("cluster.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.CLUSTER); - public static final String INDEX_ROUTING_ALLOCATION_ENABLE= "index.routing.allocation.enable"; + public static final Setting INDEX_ROUTING_ALLOCATION_ENABLE_SETTING = new Setting<>("index.routing.allocation.enable", Allocation.ALL.name(), Allocation::parse, true, Setting.Scope.INDEX); public static final Setting CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("cluster.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.CLUSTER); - public static final String INDEX_ROUTING_REBALANCE_ENABLE = "index.routing.rebalance.enable"; + public static final Setting INDEX_ROUTING_REBALANCE_ENABLE_SETTING = new Setting<>("index.routing.rebalance.enable", Rebalance.ALL.name(), Rebalance::parse, true, Setting.Scope.INDEX); private volatile Rebalance enableRebalance; private volatile Allocation enableAllocation; @@ -92,11 +92,10 @@ public class EnableAllocationDecider extends AllocationDecider { return allocation.decision(Decision.YES, NAME, "allocation disabling is ignored"); } - IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); - String enableIndexValue = indexMetaData.getSettings().get(INDEX_ROUTING_ALLOCATION_ENABLE); + final IndexMetaData indexMetaData = allocation.metaData().index(shardRouting.getIndex()); final Allocation enable; - if (enableIndexValue != null) { - enable = Allocation.parse(enableIndexValue); + if (INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.exists(indexMetaData.getSettings())) { + enable = INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.get(indexMetaData.getSettings()); } else { enable = this.enableAllocation; } @@ -129,10 +128,9 @@ public class EnableAllocationDecider 
extends AllocationDecider { } Settings indexSettings = allocation.routingNodes().metaData().index(shardRouting.index()).getSettings(); - String enableIndexValue = indexSettings.get(INDEX_ROUTING_REBALANCE_ENABLE); final Rebalance enable; - if (enableIndexValue != null) { - enable = Rebalance.parse(enableIndexValue); + if (INDEX_ROUTING_REBALANCE_ENABLE_SETTING.exists(indexSettings)) { + enable = INDEX_ROUTING_REBALANCE_ENABLE_SETTING.get(indexSettings); } else { enable = this.enableRebalance; } @@ -160,7 +158,7 @@ public class EnableAllocationDecider extends AllocationDecider { /** * Allocation values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_ALLOCATION_ENABLE_SETTING} * via cluster / index settings. */ public enum Allocation { @@ -186,7 +184,7 @@ public class EnableAllocationDecider extends AllocationDecider { /** * Rebalance values or rather their string representation to be used used with - * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE} + * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE_SETTING} * via cluster / index settings. 
*/ public enum Rebalance { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index eb9fe10e965..f8ff5f37aed 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -60,10 +60,6 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String NAME = "filter"; - public static final String INDEX_ROUTING_REQUIRE_GROUP = "index.routing.allocation.require."; - public static final String INDEX_ROUTING_INCLUDE_GROUP = "index.routing.allocation.include."; - public static final String INDEX_ROUTING_EXCLUDE_GROUP = "index.routing.allocation.exclude."; - public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.require.", true, Setting.Scope.CLUSTER); public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.include.", true, Setting.Scope.CLUSTER); public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = Setting.groupSetting("cluster.routing.allocation.exclude.", true, Setting.Scope.CLUSTER); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index 9149d04cf60..e766b4c49aa 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -32,12 +32,12 @@ import org.elasticsearch.common.settings.Settings; /** * This {@link AllocationDecider} limits the 
number of shards per node on a per * index or node-wide basis. The allocator prevents a single node to hold more - * than {@value #INDEX_TOTAL_SHARDS_PER_NODE} per index and + * than index.routing.allocation.total_shards_per_node per index and * cluster.routing.allocation.total_shards_per_node globally during the allocation * process. The limits of this decider can be changed in real-time via a the * index settings API. *

- * If {@value #INDEX_TOTAL_SHARDS_PER_NODE} is reset to a negative value shards + * If index.routing.allocation.total_shards_per_node is reset to a negative value shards * per index are unlimited per node. Shards currently in the * {@link ShardRoutingState#RELOCATING relocating} state are ignored by this * {@link AllocationDecider} until the shard changed its state to either @@ -59,12 +59,13 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { * Controls the maximum number of shards per index on a single Elasticsearch * node. Negative values are interpreted as unlimited. */ - public static final String INDEX_TOTAL_SHARDS_PER_NODE = "index.routing.allocation.total_shards_per_node"; + public static final Setting INDEX_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("index.routing.allocation.total_shards_per_node", -1, -1, true, Setting.Scope.INDEX); + /** * Controls the maximum number of shards per node on a global level. * Negative values are interpreted as unlimited. 
*/ - public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, true, Setting.Scope.CLUSTER); + public static final Setting CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING = Setting.intSetting("cluster.routing.allocation.total_shards_per_node", -1, -1, true, Setting.Scope.CLUSTER); @Inject @@ -81,7 +82,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); - int indexShardLimit = indexMd.getSettings().getAsInt(INDEX_TOTAL_SHARDS_PER_NODE, -1); + final int indexShardLimit = INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get(indexMd.getSettings(), settings); // Capture the limit here in case it changes during this method's // execution final int clusterShardLimit = this.clusterShardLimit; @@ -118,7 +119,7 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { @Override public Decision canRemain(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { IndexMetaData indexMd = allocation.routingNodes().metaData().index(shardRouting.index()); - int indexShardLimit = indexMd.getSettings().getAsInt(INDEX_TOTAL_SHARDS_PER_NODE, -1); + final int indexShardLimit = INDEX_TOTAL_SHARDS_PER_NODE_SETTING.get(indexMd.getSettings(), settings); // Capture the limit here in case it changes during this method's // execution final int clusterShardLimit = this.clusterShardLimit; diff --git a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index ca135728b87..98d98414db3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ 
-630,7 +630,11 @@ public class InternalClusterService extends AbstractLifecycleComponent dynamicSettings; - - public static class Builder { - private ImmutableOpenMap.Builder settings = ImmutableOpenMap.builder(); - - public void addSetting(String setting, Validator validator) { - Validator old = settings.put(setting, validator); - if (old != null) { - throw new IllegalArgumentException("Cannot register setting [" + setting + "] twice"); - } - } - - public DynamicSettings build() { - return new DynamicSettings(settings.build()); - } - } - - private DynamicSettings(ImmutableOpenMap settings) { - this.dynamicSettings = settings; - } - - public boolean isDynamicOrLoggingSetting(String key) { - return hasDynamicSetting(key) || key.startsWith("logger."); - } - - public boolean hasDynamicSetting(String key) { - for (ObjectCursor dynamicSetting : dynamicSettings.keys()) { - if (Regex.simpleMatch(dynamicSetting.value, key)) { - return true; - } - } - return false; - } - - public String validateDynamicSetting(String dynamicSetting, String value, ClusterState clusterState) { - for (ObjectObjectCursor setting : dynamicSettings) { - if (Regex.simpleMatch(setting.key, dynamicSetting)) { - return setting.value.validate(dynamicSetting, value, clusterState); - } - } - return null; - } -} diff --git a/core/src/main/java/org/elasticsearch/cluster/settings/Validator.java b/core/src/main/java/org/elasticsearch/cluster/settings/Validator.java deleted file mode 100644 index cb253dceadf..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/settings/Validator.java +++ /dev/null @@ -1,307 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.settings; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.Booleans; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; - -import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; -import static org.elasticsearch.common.unit.MemorySizeValue.parseBytesSizeValueOrHeapRatio; - - -/** - * Validates a setting, returning a failure message if applicable. - */ -public interface Validator { - - String validate(String setting, String value, ClusterState clusterState); - - Validator EMPTY = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - return null; - } - }; - - Validator TIME = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - if (value == null) { - throw new NullPointerException("value must not be null"); - } - try { - // This never returns null: - TimeValue.parseTimeValue(value, null, setting); - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator TIMEOUT = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (value == null) { - throw new NullPointerException("value must not be null"); - } - TimeValue timeValue = TimeValue.parseTimeValue(value, null, setting); - assert timeValue != null; - 
if (timeValue.millis() < 0 && timeValue.millis() != -1) { - return "cannot parse value [" + value + "] as a timeout"; - } - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator TIME_NON_NEGATIVE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (value == null) { - throw new NullPointerException("value must not be null"); - } - TimeValue timeValue = TimeValue.parseTimeValue(value, null, setting); - assert timeValue != null; - if (timeValue.millis() < 0) { - return "cannot parse value [" + value + "] as non negative time"; - } - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator FLOAT = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - Float.parseFloat(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a float"; - } - return null; - } - }; - - Validator NON_NEGATIVE_FLOAT = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Float.parseFloat(value) < 0.0) { - return "the value of the setting " + setting + " must be a non negative float"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator DOUBLE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - Double.parseDouble(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator NON_NEGATIVE_DOUBLE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Double.parseDouble(value) < 0.0) { - return "the value of the 
setting " + setting + " must be a non negative double"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator DOUBLE_GTE_2 = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Double.parseDouble(value) < 2.0) { - return "the value of the setting " + setting + " must be >= 2.0"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as a double"; - } - return null; - } - }; - - Validator INTEGER = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - Integer.parseInt(value); - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator POSITIVE_INTEGER = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Integer.parseInt(value) <= 0) { - return "the value of the setting " + setting + " must be a positive integer"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator NON_NEGATIVE_INTEGER = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Integer.parseInt(value) < 0) { - return "the value of the setting " + setting + " must be a non negative integer"; - } - } catch (NumberFormatException ex) { - return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator INTEGER_GTE_2 = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (Integer.parseInt(value) < 2) { - return "the value of the setting " + setting + " must be >= 2"; - } - } catch (NumberFormatException ex) { - 
return "cannot parse value [" + value + "] as an integer"; - } - return null; - } - }; - - Validator BYTES_SIZE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - parseBytesSizeValue(value, setting); - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator POSITIVE_BYTES_SIZE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState state) { - try { - ByteSizeValue byteSizeValue = parseBytesSizeValue(value, setting); - if (byteSizeValue.getBytes() <= 0) { - return setting + " must be a positive byte size value"; - } - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - Validator PERCENTAGE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - try { - if (value == null) { - return "the value of " + setting + " can not be null"; - } - if (!value.endsWith("%")) { - return "the value [" + value + "] for " + setting + " must end with %"; - } - final double asDouble = Double.parseDouble(value.substring(0, value.length() - 1)); - if (asDouble < 0.0 || asDouble > 100.0) { - return "the value [" + value + "] for " + setting + " must be a percentage between 0% and 100%"; - } - } catch (NumberFormatException ex) { - return ex.getMessage(); - } - return null; - } - }; - - - Validator BYTES_SIZE_OR_PERCENTAGE = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - String byteSize = BYTES_SIZE.validate(setting, value, clusterState); - if (byteSize != null) { - String percentage = PERCENTAGE.validate(setting, value, clusterState); - if (percentage == null) { - return null; - } - return percentage + " or be a valid bytes size value, like [16mb]"; - } - return null; - } - }; - - - Validator MEMORY_SIZE = new Validator() { - @Override - 
public String validate(String setting, String value, ClusterState clusterState) { - try { - parseBytesSizeValueOrHeapRatio(value, setting); - } catch (ElasticsearchParseException ex) { - return ex.getMessage(); - } - return null; - } - }; - - public static final Validator BOOLEAN = new Validator() { - @Override - public String validate(String setting, String value, ClusterState clusterState) { - - if (value != null && (Booleans.isExplicitFalse(value) || Booleans.isExplicitTrue(value))) { - return null; - } - return "cannot parse value [" + value + "] as a boolean"; - } - }; -} diff --git a/core/src/main/java/org/elasticsearch/common/Randomness.java b/core/src/main/java/org/elasticsearch/common/Randomness.java index 7f71afc1c70..154ebf3736b 100644 --- a/core/src/main/java/org/elasticsearch/common/Randomness.java +++ b/core/src/main/java/org/elasticsearch/common/Randomness.java @@ -19,6 +19,7 @@ package org.elasticsearch.common; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import java.lang.reflect.Method; @@ -40,7 +41,7 @@ import java.util.concurrent.ThreadLocalRandom; * setting a reproducible seed. When running the Elasticsearch server * process, non-reproducible sources of randomness are provided (unless * a setting is provided for a module that exposes a seed setting (e.g., - * DiscoveryService#SETTING_DISCOVERY_SEED)). + * DiscoveryService#DISCOVERY_SEED_SETTING)). */ public final class Randomness { private static final Method currentMethod; @@ -68,13 +69,12 @@ public final class Randomness { * seed in the settings with the key setting. 
* * @param settings the settings containing the seed - * @param setting the key to access the seed + * @param setting the setting to access the seed * @return a reproducible source of randomness */ - public static Random get(Settings settings, String setting) { - Long maybeSeed = settings.getAsLong(setting, null); - if (maybeSeed != null) { - return new Random(maybeSeed); + public static Random get(Settings settings, Setting setting) { + if (setting.exists(settings)) { + return new Random(setting.get(settings)); } else { return get(); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index c15e0d9130e..bda0106f2b6 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -36,7 +36,7 @@ public class CircleBuilder extends ShapeBuilder { public static final String FIELD_RADIUS = "radius"; public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; - static final CircleBuilder PROTOTYPE = new CircleBuilder(); + public static final CircleBuilder PROTOTYPE = new CircleBuilder(); private DistanceUnit unit = DistanceUnit.DEFAULT; private double radius; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 9ad51292977..426cbbf7800 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -33,7 +33,7 @@ public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.ENVELOPE; - static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(new Coordinate(-1.0, 1.0), new Coordinate(1.0, -1.0)); + public static final EnvelopeBuilder PROTOTYPE = new EnvelopeBuilder(new 
Coordinate(-1.0, 1.0), new Coordinate(1.0, -1.0)); private Coordinate topLeft; private Coordinate bottomRight; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 6e0094f7165..420f61a6799 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -36,7 +36,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION; - static final GeometryCollectionBuilder PROTOTYPE = new GeometryCollectionBuilder(); + public static final GeometryCollectionBuilder PROTOTYPE = new GeometryCollectionBuilder(); protected final ArrayList shapes = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index 4542da436bd..8c2870e1e09 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -57,7 +57,7 @@ public class LineStringBuilder extends CoordinateCollection { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; - static final LineStringBuilder PROTOTYPE = new LineStringBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(1.0, 1.0)); + public static final LineStringBuilder PROTOTYPE = new LineStringBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(1.0, 1.0)); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java 
b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 0de79ac8fe8..e69c0abe4f8 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -37,7 +37,7 @@ public class MultiLineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING; - static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); + public static final MultiLineStringBuilder PROTOTYPE = new MultiLineStringBuilder(); private final ArrayList lines = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index 1ed976f43ee..12b16254957 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -37,7 +37,7 @@ public class MultiPointBuilder extends CoordinateCollection { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; - final static MultiPointBuilder PROTOTYPE = new MultiPointBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).build()); + public static final MultiPointBuilder PROTOTYPE = new MultiPointBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).build()); /** * Create a new {@link MultiPointBuilder}. 
diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 8d77d395155..394892d909d 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -36,7 +36,7 @@ import java.util.Objects; public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; - static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); + public static final MultiPolygonBuilder PROTOTYPE = new MultiPolygonBuilder(); private final ArrayList polygons = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index 40e57566d48..1cee6525e7a 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -32,7 +32,7 @@ import java.util.Objects; public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; - static final PointBuilder PROTOTYPE = new PointBuilder(); + public static final PointBuilder PROTOTYPE = new PointBuilder(); private Coordinate coordinate; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 36589a4a4a7..ab480cfbc24 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -53,7 +53,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class PolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; 
- static final PolygonBuilder PROTOTYPE = new PolygonBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(0.0, 1.0) + public static final PolygonBuilder PROTOTYPE = new PolygonBuilder(new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(0.0, 1.0) .coordinate(1.0, 0.0).coordinate(0.0, 0.0)); private static final Coordinate[][] EMPTY = new Coordinate[0][]; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java deleted file mode 100644 index c66e969aa3a..00000000000 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilderRegistry.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.geo.builders; - -import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; - -/** - * Register the shape builder prototypes with the {@link NamedWriteableRegistry} - */ -public class ShapeBuilderRegistry { - - @Inject - public ShapeBuilderRegistry(NamedWriteableRegistry namedWriteableRegistry) { - if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); - namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java b/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java index 9ad7bc407d0..931d290fc19 100644 --- a/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java +++ b/core/src/main/java/org/elasticsearch/common/inject/matcher/AbstractMatcher.java @@ -49,8 +49,8 @@ public abstract class AbstractMatcher implements Matcher { @Override public boolean equals(Object other) { return other instanceof AndMatcher - && ((AndMatcher) other).a.equals(a) - && ((AndMatcher) 
other).b.equals(b); + && ((AndMatcher) other).a.equals(a) + && ((AndMatcher) other).b.equals(b); } @Override @@ -80,8 +80,8 @@ public abstract class AbstractMatcher implements Matcher { @Override public boolean equals(Object other) { return other instanceof OrMatcher - && ((OrMatcher) other).a.equals(a) - && ((OrMatcher) other).b.equals(b); + && ((OrMatcher) other).a.equals(a) + && ((OrMatcher) other).b.equals(b); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java index b1271e7338d..92aa02ba002 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java @@ -20,9 +20,11 @@ package org.elasticsearch.common.lucene; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReader.CoreClosedListener; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; +import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -72,7 +74,7 @@ public final class ShardCoreKeyMap { } final boolean added = objects.add(coreKey); assert added; - reader.addCoreClosedListener(ownerCoreCacheKey -> { + CoreClosedListener listener = ownerCoreCacheKey -> { assert coreKey == ownerCoreCacheKey; synchronized (ShardCoreKeyMap.this) { coreKeyToShard.remove(ownerCoreCacheKey); @@ -83,7 +85,20 @@ public final class ShardCoreKeyMap { indexToCoreKey.remove(index); } } - }); + }; + boolean addedListener = false; + try { + reader.addCoreClosedListener(listener); + addedListener = true; + } finally { + if (false == addedListener) { + try { + listener.onClose(coreKey); + } catch (IOException e) { + throw new RuntimeException("Blow up trying to recover from failure to add listener", e); + } + } + } } } } diff --git 
a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java index b3abed6e230..8be907e3072 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -19,13 +19,18 @@ package org.elasticsearch.common.network; +import java.util.Arrays; +import java.util.List; + import org.elasticsearch.client.support.Headers; import org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.client.transport.support.TransportProxyClient; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.http.HttpServer; import org.elasticsearch.http.HttpServerTransport; @@ -135,9 +140,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; import org.elasticsearch.transport.netty.NettyTransport; -import java.util.Arrays; -import java.util.List; - /** * A module to handle registering and binding all network related classes. 
*/ @@ -150,7 +152,7 @@ public class NetworkModule extends AbstractModule { public static final String NETTY_TRANSPORT = "netty"; public static final String HTTP_TYPE_KEY = "http.type"; - public static final String HTTP_ENABLED = "http.enabled"; + public static final Setting HTTP_ENABLED = Setting.boolSetting("http.enabled", true, false, Scope.CLUSTER); private static final List> builtinRestHandlers = Arrays.asList( RestMainAction.class, @@ -291,6 +293,7 @@ public class NetworkModule extends AbstractModule { private final ExtensionPoint.ClassSet restHandlers = new ExtensionPoint.ClassSet<>("rest_handler", RestHandler.class); // we must separate the cat rest handlers so RestCatAction can collect them... private final ExtensionPoint.ClassSet catHandlers = new ExtensionPoint.ClassSet<>("cat_handler", AbstractCatAction.class); + private final NamedWriteableRegistry namedWriteableRegistry; /** * Creates a network module that custom networking classes can be plugged into. @@ -298,11 +301,13 @@ public class NetworkModule extends AbstractModule { * @param networkService A constructed network service object to bind. * @param settings The settings for the node * @param transportClient True if only transport classes should be allowed to be registered, false otherwise. 
+ * @param namedWriteableRegistry registry for named writeables for use during streaming */ - public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient) { + public NetworkModule(NetworkService networkService, Settings settings, boolean transportClient, NamedWriteableRegistry namedWriteableRegistry) { this.networkService = networkService; this.settings = settings; this.transportClient = transportClient; + this.namedWriteableRegistry = namedWriteableRegistry; registerTransportService(NETTY_TRANSPORT, TransportService.class); registerTransport(LOCAL_TRANSPORT, LocalTransport.class); registerTransport(NETTY_TRANSPORT, NettyTransport.class); @@ -354,7 +359,7 @@ public class NetworkModule extends AbstractModule { @Override protected void configure() { bind(NetworkService.class).toInstance(networkService); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); transportServiceTypes.bindType(binder(), settings, TRANSPORT_SERVICE_TYPE_KEY, NETTY_TRANSPORT); String defaultTransport = DiscoveryNode.localNode(settings) ? 
LOCAL_TRANSPORT : NETTY_TRANSPORT; @@ -365,7 +370,7 @@ public class NetworkModule extends AbstractModule { bind(TransportProxyClient.class).asEagerSingleton(); bind(TransportClientNodesService.class).asEagerSingleton(); } else { - if (settings.getAsBoolean(HTTP_ENABLED, true)) { + if (HTTP_ENABLED.get(settings)) { bind(HttpServer.class).asEagerSingleton(); httpTransportTypes.bindType(binder(), settings, HTTP_TYPE_KEY, NETTY_TRANSPORT); } diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index 835a35d2383..a1286aaec55 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -19,7 +19,9 @@ package org.elasticsearch.common.network; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -41,31 +43,30 @@ public class NetworkService extends AbstractComponent { /** By default, we bind to loopback interfaces */ public static final String DEFAULT_NETWORK_HOST = "_local_"; - private static final String GLOBAL_NETWORK_HOST_SETTING = "network.host"; - private static final String GLOBAL_NETWORK_BINDHOST_SETTING = "network.bind_host"; - private static final String GLOBAL_NETWORK_PUBLISHHOST_SETTING = "network.publish_host"; + public static final Setting> GLOBAL_NETWORK_HOST_SETTING = Setting.listSetting("network.host", Arrays.asList(DEFAULT_NETWORK_HOST), + s -> s, false, Setting.Scope.CLUSTER); + public static final Setting> GLOBAL_NETWORK_BINDHOST_SETTING = Setting.listSetting("network.bind_host", GLOBAL_NETWORK_HOST_SETTING, + s -> s, false, Setting.Scope.CLUSTER); + public static final Setting> 
GLOBAL_NETWORK_PUBLISHHOST_SETTING = Setting.listSetting("network.publish_host", GLOBAL_NETWORK_HOST_SETTING, + s -> s, false, Setting.Scope.CLUSTER); public static final class TcpSettings { - public static final String TCP_NO_DELAY = "network.tcp.no_delay"; - public static final String TCP_KEEP_ALIVE = "network.tcp.keep_alive"; - public static final String TCP_REUSE_ADDRESS = "network.tcp.reuse_address"; - public static final String TCP_SEND_BUFFER_SIZE = "network.tcp.send_buffer_size"; - public static final String TCP_RECEIVE_BUFFER_SIZE = "network.tcp.receive_buffer_size"; - public static final String TCP_BLOCKING = "network.tcp.blocking"; - public static final String TCP_BLOCKING_SERVER = "network.tcp.blocking_server"; - public static final String TCP_BLOCKING_CLIENT = "network.tcp.blocking_client"; - public static final String TCP_CONNECT_TIMEOUT = "network.tcp.connect_timeout"; - - public static final ByteSizeValue TCP_DEFAULT_SEND_BUFFER_SIZE = null; - public static final ByteSizeValue TCP_DEFAULT_RECEIVE_BUFFER_SIZE = null; - public static final TimeValue TCP_DEFAULT_CONNECT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + public static final Setting TCP_NO_DELAY = Setting.boolSetting("network.tcp.no_delay", true, false, Setting.Scope.CLUSTER); + public static final Setting TCP_KEEP_ALIVE = Setting.boolSetting("network.tcp.keep_alive", true, false, Setting.Scope.CLUSTER); + public static final Setting TCP_REUSE_ADDRESS = Setting.boolSetting("network.tcp.reuse_address", NetworkUtils.defaultReuseAddress(), false, Setting.Scope.CLUSTER); + public static final Setting TCP_SEND_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.send_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + public static final Setting TCP_RECEIVE_BUFFER_SIZE = Setting.byteSizeSetting("network.tcp.receive_buffer_size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); + public static final Setting TCP_BLOCKING = Setting.boolSetting("network.tcp.blocking", false, 
false, Setting.Scope.CLUSTER); + public static final Setting TCP_BLOCKING_SERVER = Setting.boolSetting("network.tcp.blocking_server", TCP_BLOCKING, false, Setting.Scope.CLUSTER); + public static final Setting TCP_BLOCKING_CLIENT = Setting.boolSetting("network.tcp.blocking_client", TCP_BLOCKING, false, Setting.Scope.CLUSTER); + public static final Setting TCP_CONNECT_TIMEOUT = Setting.timeSetting("network.tcp.connect_timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER); } /** * A custom name resolver can support custom lookup keys (my_net_key:ipv4) and also change * the default inet address used in case no settings is provided. */ - public static interface CustomNameResolver { + public interface CustomNameResolver { /** * Resolves the default value if possible. If not, return null. */ @@ -94,6 +95,7 @@ public class NetworkService extends AbstractComponent { /** * Resolves {@code bindHosts} to a list of internet addresses. The list will * not contain duplicate addresses. + * * @param bindHosts list of hosts to bind to. this may contain special pseudo-hostnames * such as _local_ (see the documentation). if it is null, it will be populated * based on global default settings. 
@@ -102,21 +104,22 @@ public class NetworkService extends AbstractComponent { public InetAddress[] resolveBindHostAddresses(String bindHosts[]) throws IOException { // first check settings if (bindHosts == null) { - bindHosts = settings.getAsArray(GLOBAL_NETWORK_BINDHOST_SETTING, settings.getAsArray(GLOBAL_NETWORK_HOST_SETTING, null)); - } - // next check any registered custom resolvers - if (bindHosts == null) { - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveDefault(); - if (addresses != null) { - return addresses; + if (GLOBAL_NETWORK_BINDHOST_SETTING.exists(settings) || GLOBAL_NETWORK_HOST_SETTING.exists(settings)) { + // if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline + bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); + } else { + // next check any registered custom resolvers + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveDefault(); + if (addresses != null) { + return addresses; + } } + // we know it's not here. get the defaults + bindHosts = GLOBAL_NETWORK_BINDHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); } } - // finally, fill with our default - if (bindHosts == null) { - bindHosts = new String[] { DEFAULT_NETWORK_HOST }; - } + InetAddress addresses[] = resolveInetAddresses(bindHosts); // try to deal with some (mis)configuration @@ -138,6 +141,7 @@ public class NetworkService extends AbstractComponent { * only one address is just a current limitation. *

* If {@code publishHosts} resolves to more than one address, then one is selected with magic + * * @param publishHosts list of hosts to publish as. this may contain special pseudo-hostnames * such as _local_ (see the documentation). if it is null, it will be populated * based on global default settings. @@ -145,23 +149,23 @@ public class NetworkService extends AbstractComponent { */ // TODO: needs to be InetAddress[] public InetAddress resolvePublishHostAddresses(String publishHosts[]) throws IOException { - // first check settings if (publishHosts == null) { - publishHosts = settings.getAsArray(GLOBAL_NETWORK_PUBLISHHOST_SETTING, settings.getAsArray(GLOBAL_NETWORK_HOST_SETTING, null)); - } - // next check any registered custom resolvers - if (publishHosts == null) { - for (CustomNameResolver customNameResolver : customNameResolvers) { - InetAddress addresses[] = customNameResolver.resolveDefault(); - if (addresses != null) { - return addresses[0]; + if (GLOBAL_NETWORK_PUBLISHHOST_SETTING.exists(settings) || GLOBAL_NETWORK_HOST_SETTING.exists(settings)) { + // if we have settings use them (we have a fallback to GLOBAL_NETWORK_HOST_SETTING inline + publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); + } else { + // next check any registered custom resolvers + for (CustomNameResolver customNameResolver : customNameResolvers) { + InetAddress addresses[] = customNameResolver.resolveDefault(); + if (addresses != null) { + return addresses[0]; + } } + // we know it's not here. get the defaults + publishHosts = GLOBAL_NETWORK_PUBLISHHOST_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY); } } - // finally, fill with our default - if (publishHosts == null) { - publishHosts = new String[] { DEFAULT_NETWORK_HOST }; - } + InetAddress addresses[] = resolveInetAddresses(publishHosts); // TODO: allow publishing multiple addresses // for now... 
the hack begins @@ -184,17 +188,17 @@ public class NetworkService extends AbstractComponent { throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is wildcard, but multiple addresses specified: this makes no sense"); } } - + // 3. if we end out with multiple publish addresses, select by preference. // don't warn the user, or they will get confused by bind_host vs publish_host etc. if (addresses.length > 1) { List sorted = new ArrayList<>(Arrays.asList(addresses)); NetworkUtils.sortAddresses(sorted); - addresses = new InetAddress[] { sorted.get(0) }; + addresses = new InetAddress[]{sorted.get(0)}; } return addresses[0]; } - + /** resolves (and deduplicates) host specification */ private InetAddress[] resolveInetAddresses(String hosts[]) throws IOException { if (hosts.length == 0) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 2c599559920..65db6d155d3 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -21,14 +21,20 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.util.set.Sets; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.regex.Pattern; /** * A basic setting service that can be used for per-index and per-cluster settings. 
@@ -36,24 +42,54 @@ import java.util.function.Consumer; */ public abstract class AbstractScopedSettings extends AbstractComponent { private Settings lastSettingsApplied = Settings.EMPTY; - private final List settingUpdaters = new ArrayList<>(); - private final Map> complexMatchers = new HashMap<>(); - private final Map> keySettings = new HashMap<>(); + private final List> settingUpdaters = new CopyOnWriteArrayList<>(); + private final Map> complexMatchers; + private final Map> keySettings; private final Setting.Scope scope; + private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$"); + private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$"); protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.Scope scope) { super(settings); - for (Setting entry : settingsSet) { - if (entry.getScope() != scope) { - throw new IllegalArgumentException("Setting must be a cluster setting but was: " + entry.getScope()); + this.lastSettingsApplied = Settings.EMPTY; + this.scope = scope; + Map> complexMatchers = new HashMap<>(); + Map> keySettings = new HashMap<>(); + for (Setting setting : settingsSet) { + if (setting.getScope() != scope) { + throw new IllegalArgumentException("Setting must be a " + scope + " setting but was: " + setting.getScope()); } - if (entry.hasComplexMatcher()) { - complexMatchers.put(entry.getKey(), entry); + if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())) == false) { + throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); + } + if (setting.hasComplexMatcher()) { + complexMatchers.putIfAbsent(setting.getKey(), setting); } else { - keySettings.put(entry.getKey(), entry); + keySettings.putIfAbsent(setting.getKey(), setting); } } - this.scope = scope; + this.complexMatchers = Collections.unmodifiableMap(complexMatchers); + this.keySettings = Collections.unmodifiableMap(keySettings); + } + + 
protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, AbstractScopedSettings other) { + super(nodeSettings); + this.lastSettingsApplied = scopeSettings; + this.scope = other.scope; + complexMatchers = other.complexMatchers; + keySettings = other.keySettings; + settingUpdaters.addAll(other.settingUpdaters); + } + + /** + * Returns true iff the given key is a valid settings key otherwise false + */ + public static boolean isValidKey(String key) { + return KEY_PATTERN.matcher(key).matches(); + } + + private static boolean isValidGroupKey(String key) { + return GROUP_KEY_PATTERN.matcher(key).matches(); } public Setting.Scope getScope() { @@ -68,7 +104,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { final Settings current = Settings.builder().put(this.settings).put(settings).build(); final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); List exceptions = new ArrayList<>(); - for (SettingUpdater settingUpdater : settingUpdaters) { + for (SettingUpdater settingUpdater : settingUpdaters) { try { if (settingUpdater.hasChanged(current, previous)) { settingUpdater.getValue(current, previous); @@ -99,7 +135,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build(); try { List applyRunnables = new ArrayList<>(); - for (SettingUpdater settingUpdater : settingUpdaters) { + for (SettingUpdater settingUpdater : settingUpdaters) { try { applyRunnables.add(settingUpdater.updater(current, previous)); } catch (Exception ex) { @@ -161,9 +197,38 @@ public abstract class AbstractScopedSettings extends AbstractComponent { addSettingsUpdateConsumer(setting, consumer, (s) -> {}); } + /** + * Validates that all settings in the builder are registered and valid + */ + public final void validate(Settings.Builder settingsBuilder) { + validate(settingsBuilder.build()); + } + 
+ /** + * * Validates that all given settings are registered and valid + */ + public final void validate(Settings settings) { + for (Map.Entry entry : settings.getAsMap().entrySet()) { + validate(entry.getKey(), settings); + } + } + + + /** + * Validates that the setting is valid + */ + public final void validate(String key, Settings settings) { + Setting setting = get(key); + if (setting == null) { + throw new IllegalArgumentException("unknown setting [" + key + "]"); + } + setting.get(settings); + } + /** * Transactional interface to update settings. * @see Setting + * @param the type of the value of the setting */ public interface SettingUpdater { @@ -216,17 +281,16 @@ public abstract class AbstractScopedSettings extends AbstractComponent { /** * Returns the {@link Setting} for the given key or null if the setting can not be found. */ - public Setting get(String key) { + public Setting get(String key) { Setting setting = keySettings.get(key); - if (setting == null) { - for (Map.Entry> entry : complexMatchers.entrySet()) { - if (entry.getValue().match(key)) { - return entry.getValue(); - } - } - } else { + if (setting != null) { return setting; } + for (Map.Entry> entry : complexMatchers.entrySet()) { + if (entry.getValue().match(key)) { + return entry.getValue(); + } + } return null; } @@ -234,7 +298,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { * Returns true if the setting for the given key is dynamically updateable. Otherwise false. */ public boolean hasDynamicSetting(String key) { - final Setting setting = get(key); + final Setting setting = get(key); return setting != null && setting.isDynamic(); } @@ -253,4 +317,93 @@ public abstract class AbstractScopedSettings extends AbstractComponent { return builder.build(); } + /** + * Returns the value for the given setting. 
+ */ + public T get(Setting setting) { + if (setting.getScope() != scope) { + throw new IllegalArgumentException("settings scope doesn't match the setting scope [" + this.scope + "] != [" + setting.getScope() + "]"); + } + if (get(setting.getKey()) == null) { + throw new IllegalArgumentException("setting " + setting.getKey() + " has not been registered"); + } + return setting.get(this.lastSettingsApplied, settings); + } + + /** + * Updates a target settings builder with new, updated or deleted settings from a given settings builder. + *

+ * Note: This method will only allow updates to dynamic settings. If a non-dynamic setting is updated an {@link IllegalArgumentException} is thrown instead. +

+ * @param toApply the new settings to apply + * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be removed from this builder + * @param updates a settings builder that holds all updates applied to target + * @param type a free text string to allow better exceptions messages + * @return true if the target has changed otherwise false + */ + public boolean updateDynamicSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) { + return updateSettings(toApply, target, updates, type, true); + } + + /** + * Updates a target settings builder with new, updated or deleted settings from a given settings builder. + * @param toApply the new settings to apply + * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be removed from this builder + * @param updates a settings builder that holds all updates applied to target + * @param type a free text string to allow better exceptions messages + * @return true if the target has changed otherwise false + */ + public boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) { + return updateSettings(toApply, target, updates, type, false); + } + + /** + * Updates a target settings builder with new, updated or deleted settings from a given settings builder. + * @param toApply the new settings to apply + * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be removed from this builder + * @param updates a settings builder that holds all updates applied to target + * @param type a free text string to allow better exceptions messages + * @param onlyDynamic if false all settings are updated otherwise only dynamic settings are updated. if set to true and a non-dynamic setting is updated an exception is thrown. 
+ * @return true if the target has changed otherwise false + */ + private boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type, boolean onlyDynamic) { + boolean changed = false; + final Set toRemove = new HashSet<>(); + Settings.Builder settingsBuilder = Settings.settingsBuilder(); + for (Map.Entry entry : toApply.getAsMap().entrySet()) { + if (entry.getValue() == null) { + toRemove.add(entry.getKey()); + } else if ((onlyDynamic == false && get(entry.getKey()) != null) || hasDynamicSetting(entry.getKey())) { + validate(entry.getKey(), toApply); + settingsBuilder.put(entry.getKey(), entry.getValue()); + updates.put(entry.getKey(), entry.getValue()); + changed = true; + } else { + throw new IllegalArgumentException(type + " setting [" + entry.getKey() + "], not dynamically updateable"); + } + + } + changed |= applyDeletes(toRemove, target); + target.put(settingsBuilder.build()); + return changed; + } + + private static final boolean applyDeletes(Set deletes, Settings.Builder builder) { + boolean changed = false; + for (String entry : deletes) { + Set keysToRemove = new HashSet<>(); + Set keySet = builder.internalMap().keySet(); + for (String key : keySet) { + if (Regex.simpleMatch(entry, key)) { + keysToRemove.add(key); + } + } + for (String key : keysToRemove) { + builder.remove(key); + changed = true; + } + } + return changed; + } + } diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 10c602688a4..4680146dd88 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.settings; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.support.DestructiveOperations; +import 
org.elasticsearch.client.transport.TransportClientNodesService; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.MetaData; @@ -35,13 +36,32 @@ import org.elasticsearch.cluster.routing.allocation.decider.SnapshotInProgressAl import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.env.Environment; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.discovery.zen.fd.FaultDetection; +import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; +import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.http.netty.NettyHttpServerTransport; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStoreConfig; +import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; +import org.elasticsearch.indices.cache.request.IndicesRequestCache; +import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; +import org.elasticsearch.node.Node; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.repositories.uri.URLRepository; +import 
org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -62,7 +82,6 @@ public final class ClusterSettings extends AbstractScopedSettings { super(settings, settingsSet, Setting.Scope.CLUSTER); } - @Override public synchronized Settings applySettings(Settings newSettings) { Settings settings = super.applySettings(newSettings); @@ -83,6 +102,11 @@ public final class ClusterSettings extends AbstractScopedSettings { return settings; } + @Override + public boolean hasDynamicSetting(String key) { + return isLoggerSetting(key) || super.hasDynamicSetting(key); + } + /** * Returns true if the settings is a logger setting. */ @@ -92,6 +116,9 @@ public final class ClusterSettings extends AbstractScopedSettings { public static Set> BUILT_IN_CLUSTER_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING, + TransportClientNodesService.CLIENT_TRANSPORT_NODES_SAMPLER_INTERVAL, // TODO these transport client settings are kind of odd here and should only be valid if we are a transport client + TransportClientNodesService.CLIENT_TRANSPORT_PING_TIMEOUT, + TransportClientNodesService.CLIENT_TRANSPORT_IGNORE_CLUSTER_NAME, AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING, BalancedShardsAllocator.INDEX_BALANCE_FACTOR_SETTING, BalancedShardsAllocator.SHARD_BALANCE_FACTOR_SETTING, @@ -104,6 +131,9 @@ public final class ClusterSettings extends AbstractScopedSettings { FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING, FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING, FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING, + FsRepository.REPOSITORIES_CHUNK_SIZE_SETTING, + FsRepository.REPOSITORIES_COMPRESS_SETTING, + FsRepository.REPOSITORIES_LOCATION_SETTING, 
IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, @@ -133,6 +163,19 @@ public final class ClusterSettings extends AbstractScopedSettings { DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING, DiscoverySettings.COMMIT_TIMEOUT_SETTING, DiscoverySettings.NO_MASTER_BLOCK_SETTING, + GatewayService.EXPECTED_DATA_NODES_SETTING, + GatewayService.EXPECTED_MASTER_NODES_SETTING, + GatewayService.EXPECTED_NODES_SETTING, + GatewayService.RECOVER_AFTER_DATA_NODES_SETTING, + GatewayService.RECOVER_AFTER_MASTER_NODES_SETTING, + GatewayService.RECOVER_AFTER_NODES_SETTING, + GatewayService.RECOVER_AFTER_TIME_SETTING, + NetworkModule.HTTP_ENABLED, + NettyHttpServerTransport.SETTING_CORS_ALLOW_CREDENTIALS, + NettyHttpServerTransport.SETTING_CORS_ENABLED, + NettyHttpServerTransport.SETTING_CORS_MAX_AGE, + NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED, + NettyHttpServerTransport.SETTING_PIPELINING, HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING, HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING, @@ -149,5 +192,65 @@ public final class ClusterSettings extends AbstractScopedSettings { HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_TYPE_SETTING, HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_TYPE_SETTING, Transport.TRANSPORT_PROFILES_SETTING, - Transport.TRANSPORT_TCP_COMPRESS))); + Transport.TRANSPORT_TCP_COMPRESS, + NetworkService.GLOBAL_NETWORK_HOST_SETTING, + NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING, + NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING, + NetworkService.TcpSettings.TCP_NO_DELAY, + NetworkService.TcpSettings.TCP_KEEP_ALIVE, + NetworkService.TcpSettings.TCP_REUSE_ADDRESS, + NetworkService.TcpSettings.TCP_SEND_BUFFER_SIZE, + NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE, + 
NetworkService.TcpSettings.TCP_BLOCKING, + NetworkService.TcpSettings.TCP_BLOCKING_SERVER, + NetworkService.TcpSettings.TCP_BLOCKING_CLIENT, + NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT, + IndexSettings.QUERY_STRING_ANALYZE_WILDCARD, + IndexSettings.QUERY_STRING_ALLOW_LEADING_WILDCARD, + PrimaryShardAllocator.NODE_INITIAL_SHARDS_SETTING, + ScriptService.SCRIPT_CACHE_SIZE_SETTING, + IndicesFieldDataCache.INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING, + IndicesFieldDataCache.INDICES_FIELDDATA_CACHE_SIZE_KEY, + IndicesRequestCache.INDICES_CACHE_QUERY_SIZE, + IndicesRequestCache.INDICES_CACHE_QUERY_EXPIRE, + HunspellService.HUNSPELL_LAZY_LOAD, + HunspellService.HUNSPELL_IGNORE_CASE, + HunspellService.HUNSPELL_DICTIONARY_OPTIONS, + IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, + Environment.PATH_CONF_SETTING, + Environment.PATH_DATA_SETTING, + Environment.PATH_HOME_SETTING, + Environment.PATH_LOGS_SETTING, + Environment.PATH_PLUGINS_SETTING, + Environment.PATH_REPO_SETTING, + Environment.PATH_SCRIPTS_SETTING, + Environment.PATH_SHARED_DATA_SETTING, + Environment.PIDFILE_SETTING, + DiscoveryService.DISCOVERY_SEED_SETTING, + DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING, + DiscoveryModule.DISCOVERY_TYPE_SETTING, + DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING, + FaultDetection.PING_RETRIES_SETTING, + FaultDetection.PING_TIMEOUT_SETTING, + FaultDetection.REGISTER_CONNECTION_LISTENER_SETTING, + FaultDetection.PING_INTERVAL_SETTING, + FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING, + ZenDiscovery.PING_TIMEOUT_SETTING, + ZenDiscovery.JOIN_TIMEOUT_SETTING, + ZenDiscovery.JOIN_RETRY_ATTEMPTS_SETTING, + ZenDiscovery.JOIN_RETRY_DELAY_SETTING, + ZenDiscovery.MAX_PINGS_FROM_ANOTHER_MASTER_SETTING, + ZenDiscovery.SEND_LEAVE_REQUEST_SETTING, + ZenDiscovery.MASTER_ELECTION_FILTER_CLIENT_SETTING, + ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING, + ZenDiscovery.MASTER_ELECTION_FILTER_DATA_SETTING, + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, + 
UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING, + SearchService.DEFAULT_KEEPALIVE_SETTING, + SearchService.KEEPALIVE_INTERVAL_SETTING, + Node.WRITE_PORTS_FIELD_SETTING, + URLRepository.ALLOWED_URLS_SETTING, + URLRepository.REPOSITORIES_LIST_DIRECTORIES_SETTING, + URLRepository.REPOSITORIES_URL_SETTING, + URLRepository.SUPPORTED_PROTOCOLS_SETTING))); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java new file mode 100644 index 00000000000..86ead8c7ff9 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.settings; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.gateway.PrimaryShardAllocator; +import org.elasticsearch.index.IndexModule; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexingSlowLog; +import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.index.MergeSchedulerConfig; +import org.elasticsearch.index.SearchSlowLog; +import org.elasticsearch.index.cache.bitset.BitsetFilterCache; +import org.elasticsearch.index.engine.EngineConfig; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; +import org.elasticsearch.index.store.FsDirectoryService; +import org.elasticsearch.index.store.IndexStore; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.indices.cache.request.IndicesRequestCache; +import org.elasticsearch.search.SearchService; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +/** + * Encapsulates all valid index level settings. 
+ * @see org.elasticsearch.common.settings.Setting.Scope#INDEX + */ +public final class IndexScopedSettings extends AbstractScopedSettings { + + public static final Predicate INDEX_SETTINGS_KEY_PREDICATE = (s) -> s.startsWith(IndexMetaData.INDEX_SETTING_PREFIX); + + public static Set> BUILT_IN_INDEX_SETTINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING, + IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, + IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, + MergeSchedulerConfig.AUTO_THROTTLE_SETTING, + MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, + MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, + IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING, + IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING, + IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING, + IndexMetaData.INDEX_AUTO_EXPAND_REPLICAS_SETTING, + IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING, + IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING, + IndexMetaData.INDEX_SHADOW_REPLICAS_SETTING, + IndexMetaData.INDEX_SHARED_FILESYSTEM_SETTING, + IndexMetaData.INDEX_READ_ONLY_SETTING, + IndexMetaData.INDEX_BLOCKS_READ_SETTING, + IndexMetaData.INDEX_BLOCKS_WRITE_SETTING, + IndexMetaData.INDEX_BLOCKS_METADATA_SETTING, + IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING, + IndexMetaData.INDEX_PRIORITY_SETTING, + IndexMetaData.INDEX_DATA_PATH_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING, + SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL, + 
SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, + IndexingSlowLog.INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING, + MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, + MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, + IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING, + IndexSettings.INDEX_WARMER_ENABLED_SETTING, + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING, + IndexSettings.MAX_RESULT_WINDOW_SETTING, + IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING, + IndexSettings.DEFAULT_FIELD_SETTING, + IndexSettings.QUERY_STRING_LENIENT_SETTING, + IndexSettings.ALLOW_UNMAPPED, + IndexSettings.INDEX_CHECK_ON_STARTUP, + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING, + IndexSettings.INDEX_GC_DELETES_SETTING, + IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING, + UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, + EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, + IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, + IndexFieldDataService.INDEX_FIELDDATA_CACHE_KEY, + FieldMapper.IGNORE_MALFORMED_SETTING, + FieldMapper.COERCE_SETTING, + 
Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING, + PercolatorQueriesRegistry.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING, + MapperService.INDEX_MAPPER_DYNAMIC_SETTING, + MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING, + BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, + IndexModule.INDEX_STORE_TYPE_SETTING, + IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING, + IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING, + PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS_SETTING, + FsDirectoryService.INDEX_LOCK_FACTOR_SETTING, + EngineConfig.INDEX_CODEC_SETTING, + SearchService.INDEX_NORMS_LOADING_SETTING, + // this sucks but we can't really validate all the analyzers/similarity in here + Setting.groupSetting("index.similarity.", false, Setting.Scope.INDEX), // this allows similarity settings to be passed + Setting.groupSetting("index.analysis.", false, Setting.Scope.INDEX) // this allows analysis settings to be passed + + ))); + + public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + + public IndexScopedSettings(Settings settings, Set> settingsSet) { + super(settings, settingsSet, Setting.Scope.INDEX); + } + + private IndexScopedSettings(Settings settings, IndexScopedSettings other, IndexMetaData metaData) { + super(settings, metaData.getSettings(), other); + } + + public IndexScopedSettings copy(Settings settings, IndexMetaData metaData) { + return new IndexScopedSettings(settings, this, metaData); + } + + public boolean isPrivateSetting(String key) { + switch (key) { + case IndexMetaData.SETTING_CREATION_DATE: + case IndexMetaData.SETTING_INDEX_UUID: + case IndexMetaData.SETTING_VERSION_CREATED: + case IndexMetaData.SETTING_VERSION_UPGRADED: + case MergePolicyConfig.INDEX_MERGE_ENABLED: + return true; + default: + return false; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java 
b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 4e9e9f9428b..22cab996d63 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -36,12 +36,16 @@ import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.regex.Pattern; +import java.util.stream.Collectors; /** + * A setting. Encapsulates typical stuff like default value, parsing, and scope. + * Some (dynamic=true) can by modified at run time using the API. */ public class Setting extends ToXContentToBytes { private final String key; @@ -67,8 +71,21 @@ public class Setting extends ToXContentToBytes { this.scope = scope; } + /** + * Creates a new Setting instance + * @param key the settings key for this setting. + * @param fallBackSetting a setting to fall back to if the current setting is not set. + * @param parser a parser that parses the string rep into a complex datatype. + * @param dynamic true iff this setting can be dynamically updateable + * @param scope the scope of this setting + */ + public Setting(String key, Setting fallBackSetting, Function parser, boolean dynamic, Scope scope) { + this(key, fallBackSetting::getRaw, parser, dynamic, scope); + } + /** * Returns the settings key or a prefix if this setting is a group setting + * * @see #isGroupSetting() */ public final String getKey() { @@ -103,13 +120,21 @@ public class Setting extends ToXContentToBytes { } /** - * Returns the default values string representation for this setting. + * Returns the default value string representation for this setting. 
* @param settings a settings object for settings that has a default value depending on another setting if available */ - public final String getDefault(Settings settings) { + public final String getDefaultRaw(Settings settings) { return defaultValue.apply(settings); } + /** + * Returns the default value for this setting. + * @param settings a settings object for settings that has a default value depending on another setting if available + */ + public final T getDefault(Settings settings) { + return parser.apply(getDefaultRaw(settings)); + } + /** * Returns true iff this setting is present in the given settings object. Otherwise false */ @@ -165,6 +190,16 @@ public class Setting extends ToXContentToBytes { return builder; } + /** + * Returns the value for this setting but falls back to the second provided settings object + */ + public final T get(Settings primary, Settings secondary) { + if (exists(primary)) { + return get(primary); + } + return get(secondary); + } + /** * The settings scope - settings can either be cluster settings or per index settings. */ @@ -173,11 +208,18 @@ public class Setting extends ToXContentToBytes { INDEX; } - final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger) { + /** + * Build a new updater with a noop validator. + */ + final AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger) { return newUpdater(consumer, logger, (s) -> {}); } - AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) { + /** + * Build the updater responsible for validating new values, logging the new + * value, and eventually setting the value where it belongs. 
+ */ + AbstractScopedSettings.SettingUpdater newUpdater(Consumer consumer, ESLogger logger, Consumer validator) { if (isDynamic()) { return new Updater(consumer, logger, validator); } else { @@ -216,7 +258,7 @@ public class Setting extends ToXContentToBytes { } - private class Updater implements AbstractScopedSettings.SettingUpdater { + private final class Updater implements AbstractScopedSettings.SettingUpdater { private final Consumer consumer; private final ESLogger logger; private final Consumer accept; @@ -256,8 +298,8 @@ public class Setting extends ToXContentToBytes { } @Override - public void apply(T value, Settings current, Settings previous) { - logger.info("update [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); + public final void apply(T value, Settings current, Settings previous) { + logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); consumer.accept(value); } } @@ -285,6 +327,14 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, (s) -> Integer.toString(defaultValue), (s) -> parseInt(s, minValue, key), dynamic, scope); } + public static Setting longSetting(String key, long defaultValue, long minValue, boolean dynamic, Scope scope) { + return new Setting<>(key, (s) -> Long.toString(defaultValue), (s) -> parseLong(s, minValue, key), dynamic, scope); + } + + public static Setting simpleString(String key, boolean dynamic, Scope scope) { + return new Setting<>(key, "", Function.identity(), dynamic, scope); + } + public static int parseInt(String s, int minValue, String key) { int value = Integer.parseInt(s); if (value < minValue) { @@ -293,6 +343,14 @@ public class Setting extends ToXContentToBytes { return value; } + public static long parseLong(String s, long minValue, String key) { + long value = Long.parseLong(s); + if (value < minValue) { + throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue); + } + return 
value; + } + public static Setting intSetting(String key, int defaultValue, boolean dynamic, Scope scope) { return intSetting(key, defaultValue, Integer.MIN_VALUE, dynamic, scope); } @@ -301,6 +359,10 @@ public class Setting extends ToXContentToBytes { return new Setting<>(key, (s) -> Boolean.toString(defaultValue), Booleans::parseBooleanExact, dynamic, scope); } + public static Setting boolSetting(String key, Setting fallbackSetting, boolean dynamic, Scope scope) { + return new Setting<>(key, fallbackSetting, Booleans::parseBooleanExact, dynamic, scope); + } + public static Setting byteSizeSetting(String key, String percentage, boolean dynamic, Scope scope) { return new Setting<>(key, (s) -> percentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), dynamic, scope); } @@ -316,25 +378,15 @@ public class Setting extends ToXContentToBytes { public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { return listSetting(key, (s) -> defaultStringValue, singleValueParser, dynamic, scope); } + + public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, boolean dynamic, Scope scope) { + return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, dynamic, scope); + } + public static Setting> listSetting(String key, Function> defaultStringValue, Function singleValueParser, boolean dynamic, Scope scope) { - Function> parser = (s) -> { - try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(s)){ - XContentParser.Token token = xContentParser.nextToken(); - if (token != XContentParser.Token.START_ARRAY) { - throw new IllegalArgumentException("expected START_ARRAY but got " + token); - } - ArrayList list = new ArrayList<>(); - while ((token = xContentParser.nextToken()) !=XContentParser.Token.END_ARRAY) { - if (token != XContentParser.Token.VALUE_STRING) { - throw new 
IllegalArgumentException("expected VALUE_STRING but got " + token); - } - list.add(singleValueParser.apply(xContentParser.text())); - } - return list; - } catch (IOException e) { - throw new IllegalArgumentException("failed to parse array", e); - } - }; + Function> parser = (s) -> + parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); + return new Setting>(key, (s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, dynamic, scope) { private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?"); @Override @@ -343,6 +395,7 @@ public class Setting extends ToXContentToBytes { return array == null ? defaultValue.apply(settings) : arrayToParsableString(array); } + @Override public boolean match(String toTest) { return pattern.matcher(toTest).matches(); } @@ -354,6 +407,26 @@ public class Setting extends ToXContentToBytes { }; } + private static List parseableStringToList(String parsableString) { + try (XContentParser xContentParser = XContentType.JSON.xContent().createParser(parsableString)) { + XContentParser.Token token = xContentParser.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new IllegalArgumentException("expected START_ARRAY but got " + token); + } + ArrayList list = new ArrayList<>(); + while ((token = xContentParser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new IllegalArgumentException("expected VALUE_STRING but got " + token); + } + list.add(xContentParser.text()); + } + return list; + } catch (IOException e) { + throw new IllegalArgumentException("failed to parse array", e); + } + } + + private static String arrayToParsableString(String[] array) { try { XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent()); @@ -420,6 +493,7 @@ public class Setting extends ToXContentToBytes { @Override public void apply(Settings value, Settings current, Settings 
previous) { + logger.info("updating [{}] from [{}] to [{}]", key, getRaw(previous), getRaw(current)); consumer.accept(value); } @@ -460,4 +534,16 @@ public class Setting extends ToXContentToBytes { }, dynamic, scope); } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Setting setting = (Setting) o; + return Objects.equals(key, setting.key); + } + + @Override + public int hashCode() { + return Objects.hash(key); + } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index fadc52a556f..601ec7a4bf6 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -58,6 +58,7 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -212,6 +213,19 @@ public final class Settings implements ToXContent { return builder.build(); } + /** + * Returns a new settings object that contains all setting of the current one filtered by the given settings key predicate. + */ + public Settings filter(Predicate predicate) { + Builder builder = new Builder(); + for (Map.Entry entry : getAsMap().entrySet()) { + if (predicate.test(entry.getKey())) { + builder.put(entry.getKey(), entry.getValue()); + } + } + return builder.build(); + } + /** * Returns the settings mapped to the given setting name. 
*/ diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 8bc8ce1b651..eec6e734229 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -34,8 +34,8 @@ public class SettingsModule extends AbstractModule { private final Settings settings; private final SettingsFilter settingsFilter; - private final Map> clusterDynamicSettings = new HashMap<>(); - + private final Map> clusterSettings = new HashMap<>(); + private final Map> indexSettings = new HashMap<>(); public SettingsModule(Settings settings, SettingsFilter settingsFilter) { this.settings = settings; @@ -43,26 +43,50 @@ public class SettingsModule extends AbstractModule { for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { registerSetting(setting); } + for (Setting setting : IndexScopedSettings.BUILT_IN_INDEX_SETTINGS) { + registerSetting(setting); + } } @Override protected void configure() { + final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); + final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.clusterSettings.values())); + // by now we are fully configured, lets check node level settings for unregistered index settings + indexScopedSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE)); + // we can't call this method yet since we have not all node level settings registered. + // yet we can validate the ones we have registered to not have invalid values. this is better than nothing + // and progress over perfection and we fail as soon as possible. 
+ // clusterSettings.validate(settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate())); + for (Map.Entry entry : settings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE.negate()).getAsMap().entrySet()) { + if (clusterSettings.get(entry.getKey()) != null) { + clusterSettings.validate(entry.getKey(), settings); + } else if (AbstractScopedSettings.isValidKey(entry.getKey()) == false) { + throw new IllegalArgumentException("illegal settings key: [" + entry.getKey() + "]"); + } + } + bind(Settings.class).toInstance(settings); bind(SettingsFilter.class).toInstance(settingsFilter); - final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(clusterDynamicSettings.values())); + bind(ClusterSettings.class).toInstance(clusterSettings); + bind(IndexScopedSettings.class).toInstance(indexScopedSettings); } public void registerSetting(Setting setting) { switch (setting.getScope()) { case CLUSTER: - if (clusterDynamicSettings.containsKey(setting.getKey())) { + if (clusterSettings.containsKey(setting.getKey())) { throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); } - clusterDynamicSettings.put(setting.getKey(), setting); + clusterSettings.put(setting.getKey(), setting); break; case INDEX: - throw new UnsupportedOperationException("not yet implemented"); + if (indexSettings.containsKey(setting.getKey())) { + throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice"); + } + indexSettings.put(setting.getKey(), setting); + break; } } diff --git a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index 105bda28a99..32df65850a8 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -20,12 +20,10 @@ package org.elasticsearch.common.unit; import 
org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.util.Locale; @@ -176,7 +174,6 @@ public class ByteSizeValue implements Streamable { public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName) throws ElasticsearchParseException { settingName = Objects.requireNonNull(settingName); - assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_BYTES_SIZE_SETTINGS.contains(settingName); if (sValue == null) { return defaultValue; } diff --git a/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java b/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java index feebd93c5ab..d0e91646c0f 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java +++ b/core/src/main/java/org/elasticsearch/common/unit/DistanceUnit.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.unit; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; @@ -32,7 +33,7 @@ import java.io.IOException; * the earth ellipsoid defined in {@link GeoUtils}. 
The default unit used within * this project is METERS which is defined by DEFAULT */ -public enum DistanceUnit { +public enum DistanceUnit implements Writeable { INCH(0.0254, "in", "inch"), YARD(0.9144, "yd", "yards"), FEET(0.3048, "ft", "feet"), @@ -322,4 +323,24 @@ public enum DistanceUnit { return new Distance(Double.parseDouble(distance), defaultUnit); } } + + private static final DistanceUnit PROTOTYPE = DEFAULT; + + @Override + public DistanceUnit readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown DistanceUnit ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static DistanceUnit readUnitFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } } diff --git a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java index 18641b8e418..24a727691cc 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java +++ b/core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java @@ -67,6 +67,8 @@ public final class Fuzziness implements ToXContent, Writeable { /** * Creates a {@link Fuzziness} instance from an edit distance. The value must be one of [0, 1, 2] + * + * Note: Using this method only makes sense if the field you are applying Fuzziness to is some sort of string. 
*/ public static Fuzziness fromEdits(int edits) { return new Fuzziness(edits); diff --git a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java index dfa0c62d7a6..b1081c2c623 100644 --- a/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java +++ b/core/src/main/java/org/elasticsearch/common/unit/TimeValue.java @@ -20,12 +20,10 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.settings.Settings; import org.joda.time.Period; import org.joda.time.PeriodType; import org.joda.time.format.PeriodFormat; @@ -254,7 +252,6 @@ public class TimeValue implements Streamable { public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue, String settingName) { settingName = Objects.requireNonNull(settingName); - assert settingName.startsWith("index.") == false || MetaDataIndexUpgradeService.INDEX_TIME_SETTINGS.contains(settingName) : settingName; if (sValue == null) { return defaultValue; } diff --git a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java b/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java deleted file mode 100644 index 8d049003824..00000000000 --- a/core/src/main/java/org/elasticsearch/common/util/MultiDataPathUpgrader.java +++ /dev/null @@ -1,383 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.util; - -import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.store.Directory; -import org.apache.lucene.store.Lock; -import org.apache.lucene.store.LockObtainFailedException; -import org.apache.lucene.store.SimpleFSDirectory; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.env.ShardLock; -import org.elasticsearch.gateway.MetaDataStateFormat; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardPath; -import org.elasticsearch.index.shard.ShardStateMetaData; - -import java.io.IOException; -import java.io.PrintStream; -import java.nio.charset.StandardCharsets; -import java.nio.file.DirectoryStream; -import java.nio.file.FileStore; -import java.nio.file.FileVisitResult; -import java.nio.file.FileVisitor; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import 
java.nio.file.attribute.BasicFileAttributes; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - */ -public class MultiDataPathUpgrader { - - private final NodeEnvironment nodeEnvironment; - private final ESLogger logger = Loggers.getLogger(getClass()); - - - /** - * Creates a new upgrader instance - * @param nodeEnvironment the node env to operate on. - * - */ - public MultiDataPathUpgrader(NodeEnvironment nodeEnvironment) { - this.nodeEnvironment = nodeEnvironment; - } - - - /** - * Upgrades the given shard Id from multiple shard paths into the given target path. - * - * @see #pickShardPath(org.elasticsearch.index.shard.ShardId) - */ - public void upgrade(ShardId shard, ShardPath targetPath) throws IOException { - final Path[] paths = nodeEnvironment.availableShardPaths(shard); // custom data path doesn't need upgrading - if (isTargetPathConfigured(paths, targetPath) == false) { - throw new IllegalArgumentException("shard path must be one of the shards data paths"); - } - assert needsUpgrading(shard) : "Should not upgrade a path that needs no upgrading"; - logger.info("{} upgrading multi data dir to {}", shard, targetPath.getDataPath()); - final ShardStateMetaData loaded = ShardStateMetaData.FORMAT.loadLatestState(logger, paths); - if (loaded == null) { - throw new IllegalStateException(shard + " no shard state found in any of: " + Arrays.toString(paths) + " please check and remove them if possible"); - } - logger.info("{} loaded shard state {}", shard, loaded); - - ShardStateMetaData.FORMAT.write(loaded, loaded.version, targetPath.getShardStatePath()); - Files.createDirectories(targetPath.resolveIndex()); - try (SimpleFSDirectory directory = new SimpleFSDirectory(targetPath.resolveIndex())) { - try (final Lock lock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { - upgradeFiles(shard, targetPath, targetPath.resolveIndex(), ShardPath.INDEX_FOLDER_NAME, paths); - } catch 
(LockObtainFailedException ex) { - throw new IllegalStateException("Can't obtain lock on " + targetPath.resolveIndex(), ex); - } - - } - - - upgradeFiles(shard, targetPath, targetPath.resolveTranslog(), ShardPath.TRANSLOG_FOLDER_NAME, paths); - - logger.info("{} wipe upgraded directories", shard); - for (Path path : paths) { - if (path.equals(targetPath.getShardStatePath()) == false) { - logger.info("{} wipe shard directories: [{}]", shard, path); - IOUtils.rm(path); - } - } - - if (FileSystemUtils.files(targetPath.resolveIndex()).length == 0) { - throw new IllegalStateException("index folder [" + targetPath.resolveIndex() + "] is empty"); - } - - if (FileSystemUtils.files(targetPath.resolveTranslog()).length == 0) { - throw new IllegalStateException("translog folder [" + targetPath.resolveTranslog() + "] is empty"); - } - } - - /** - * Runs check-index on the target shard and throws an exception if it failed - */ - public void checkIndex(ShardPath targetPath) throws IOException { - BytesStreamOutput os = new BytesStreamOutput(); - PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name()); - try (Directory directory = new SimpleFSDirectory(targetPath.resolveIndex()); - final CheckIndex checkIndex = new CheckIndex(directory)) { - checkIndex.setInfoStream(out); - CheckIndex.Status status = checkIndex.checkIndex(); - out.flush(); - if (!status.clean) { - logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); - throw new IllegalStateException("index check failure"); - } - } - } - - /** - * Returns true iff the given shard needs upgrading. 
- */ - public boolean needsUpgrading(ShardId shard) { - final Path[] paths = nodeEnvironment.availableShardPaths(shard); - // custom data path doesn't need upgrading neither single path envs - if (paths.length > 1) { - int numPathsExist = 0; - for (Path path : paths) { - if (Files.exists(path.resolve(MetaDataStateFormat.STATE_DIR_NAME))) { - numPathsExist++; - if (numPathsExist > 1) { - return true; - } - } - } - } - return false; - } - - /** - * Picks a target ShardPath to allocate and upgrade the given shard to. It picks the target based on a simple - * heuristic: - *
    - *
  • if the smallest datapath has 2x more space available that the shards total size the datapath with the most bytes for that shard is picked to minimize the amount of bytes to copy
  • - *
  • otherwise the largest available datapath is used as the target no matter how big of a slice of the shard it already holds.
  • - *
- */ - public ShardPath pickShardPath(ShardId shard) throws IOException { - if (needsUpgrading(shard) == false) { - throw new IllegalStateException("Shard doesn't need upgrading"); - } - final NodeEnvironment.NodePath[] paths = nodeEnvironment.nodePaths(); - - // if we need upgrading make sure we have all paths. - for (NodeEnvironment.NodePath path : paths) { - Files.createDirectories(path.resolve(shard)); - } - final ShardFileInfo[] shardFileInfo = getShardFileInfo(shard, paths); - long totalBytesUsedByShard = 0; - long leastUsableSpace = Long.MAX_VALUE; - long mostUsableSpace = Long.MIN_VALUE; - assert shardFileInfo.length == nodeEnvironment.availableShardPaths(shard).length; - for (ShardFileInfo info : shardFileInfo) { - totalBytesUsedByShard += info.spaceUsedByShard; - leastUsableSpace = Math.min(leastUsableSpace, info.usableSpace + info.spaceUsedByShard); - mostUsableSpace = Math.max(mostUsableSpace, info.usableSpace + info.spaceUsedByShard); - } - - if (mostUsableSpace < totalBytesUsedByShard) { - throw new IllegalStateException("Can't upgrade path available space: " + new ByteSizeValue(mostUsableSpace) + " required space: " + new ByteSizeValue(totalBytesUsedByShard)); - } - ShardFileInfo target = shardFileInfo[0]; - if (leastUsableSpace >= (2 * totalBytesUsedByShard)) { - for (ShardFileInfo info : shardFileInfo) { - if (info.spaceUsedByShard > target.spaceUsedByShard) { - target = info; - } - } - } else { - for (ShardFileInfo info : shardFileInfo) { - if (info.usableSpace > target.usableSpace) { - target = info; - } - } - } - return new ShardPath(false, target.path, target.path, IndexMetaData.INDEX_UUID_NA_VALUE /* we don't know */, shard); - } - - private ShardFileInfo[] getShardFileInfo(ShardId shard, NodeEnvironment.NodePath[] paths) throws IOException { - final ShardFileInfo[] info = new ShardFileInfo[paths.length]; - for (int i = 0; i < info.length; i++) { - Path path = paths[i].resolve(shard); - final long usabelSpace = getUsabelSpace(paths[i]); - 
info[i] = new ShardFileInfo(path, usabelSpace, getSpaceUsedByShard(path)); - } - return info; - } - - protected long getSpaceUsedByShard(Path path) throws IOException { - final long[] spaceUsedByShard = new long[] {0}; - if (Files.exists(path)) { - Files.walkFileTree(path, new FileVisitor() { - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - if (attrs.isRegularFile()) { - spaceUsedByShard[0] += attrs.size(); - } - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - return FileVisitResult.CONTINUE; - } - }); - } - return spaceUsedByShard[0]; - } - - protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException { - FileStore fileStore = path.fileStore; - return fileStore.getUsableSpace(); - } - - static class ShardFileInfo { - final Path path; - final long usableSpace; - final long spaceUsedByShard; - - ShardFileInfo(Path path, long usableSpace, long spaceUsedByShard) { - this.path = path; - this.usableSpace = usableSpace; - this.spaceUsedByShard = spaceUsedByShard; - } - } - - - - private void upgradeFiles(ShardId shard, ShardPath targetPath, final Path targetDir, String folderName, Path[] paths) throws IOException { - List movedFiles = new ArrayList<>(); - for (Path path : paths) { - if (path.equals(targetPath.getDataPath()) == false) { - final Path sourceDir = path.resolve(folderName); - if (Files.exists(sourceDir)) { - logger.info("{} upgrading [{}] from [{}] to [{}]", shard, folderName, sourceDir, targetDir); - try (DirectoryStream stream = Files.newDirectoryStream(sourceDir)) { - 
Files.createDirectories(targetDir); - for (Path file : stream) { - if (IndexWriter.WRITE_LOCK_NAME.equals(file.getFileName().toString()) || Files.isDirectory(file)) { - continue; // skip write.lock - } - logger.info("{} move file [{}] size: [{}]", shard, file.getFileName(), Files.size(file)); - final Path targetFile = targetDir.resolve(file.getFileName()); - /* We are pessimistic and do a copy first to the other path and then and atomic move to rename it such that - in the worst case the file exists twice but is never lost or half written.*/ - final Path targetTempFile = Files.createTempFile(targetDir, "upgrade_", "_" + file.getFileName().toString()); - Files.copy(file, targetTempFile, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING); - Files.move(targetTempFile, targetFile, StandardCopyOption.ATOMIC_MOVE); // we are on the same FS - this must work otherwise all bets are off - Files.delete(file); - movedFiles.add(targetFile); - } - } - } - } - } - if (movedFiles.isEmpty() == false) { - // fsync later it might be on disk already - logger.info("{} fsync files", shard); - for (Path moved : movedFiles) { - logger.info("{} syncing [{}]", shard, moved.getFileName()); - IOUtils.fsync(moved, false); - } - logger.info("{} syncing directory [{}]", shard, targetDir); - IOUtils.fsync(targetDir, true); - } - } - - - /** - * Returns true iff the target path is one of the given paths. - */ - private boolean isTargetPathConfigured(final Path[] paths, ShardPath targetPath) { - for (Path path : paths) { - if (path.equals(targetPath.getDataPath())) { - return true; - } - } - return false; - } - - /** - * Runs an upgrade on all shards located under the given node environment if there is more than 1 data.path configured - * otherwise this method will return immediately. 
- */ - public static void upgradeMultiDataPath(NodeEnvironment nodeEnv, ESLogger logger) throws IOException { - if (nodeEnv.nodeDataPaths().length > 1) { - final MultiDataPathUpgrader upgrader = new MultiDataPathUpgrader(nodeEnv); - final Set allIndices = nodeEnv.findAllIndices(); - - for (String index : allIndices) { - for (ShardId shardId : findAllShardIds(nodeEnv.indexPaths(new Index(index)))) { - try (ShardLock lock = nodeEnv.shardLock(shardId, 0)) { - if (upgrader.needsUpgrading(shardId)) { - final ShardPath shardPath = upgrader.pickShardPath(shardId); - upgrader.upgrade(shardId, shardPath); - // we have to check if the index path exists since we might - // have only upgraded the shard state that is written under /indexname/shardid/_state - // in the case we upgraded a dedicated index directory index - if (Files.exists(shardPath.resolveIndex())) { - upgrader.checkIndex(shardPath); - } - } else { - logger.debug("{} no upgrade needed - already upgraded"); - } - } - } - } - } - } - - private static Set findAllShardIds(Path... 
locations) throws IOException { - final Set shardIds = new HashSet<>(); - for (final Path location : locations) { - if (Files.isDirectory(location)) { - shardIds.addAll(findAllShardsForIndex(location)); - } - } - return shardIds; - } - - private static Set findAllShardsForIndex(Path indexPath) throws IOException { - Set shardIds = new HashSet<>(); - if (Files.isDirectory(indexPath)) { - try (DirectoryStream stream = Files.newDirectoryStream(indexPath)) { - String currentIndex = indexPath.getFileName().toString(); - for (Path shardPath : stream) { - String fileName = shardPath.getFileName().toString(); - if (Files.isDirectory(shardPath) && fileName.chars().allMatch(Character::isDigit)) { - int shardId = Integer.parseInt(fileName); - ShardId id = new ShardId(currentIndex, shardId); - shardIds.add(id); - } - } - } - } - return shardIds; - } - -} diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java index 50d6df9a6a7..6c3e24d1ab9 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnable.java @@ -20,6 +20,9 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.Priority; +import java.util.concurrent.TimeUnit; +import java.util.function.LongSupplier; + /** * */ @@ -27,22 +30,37 @@ public abstract class PrioritizedRunnable implements Runnable, Comparable DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", + settings -> DiscoveryNode.localNode(settings) ? 
"local" : "zen", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting ZEN_MASTER_SERVICE_TYPE_SETTING = new Setting<>("discovery.zen.masterservice.type", + "zen", Function.identity(), false, Setting.Scope.CLUSTER); private final Settings settings; private final List> unicastHostProviders = new ArrayList<>(); @@ -93,15 +97,14 @@ public class DiscoveryModule extends AbstractModule { @Override protected void configure() { - String defaultType = DiscoveryNode.localNode(settings) ? "local" : "zen"; - String discoveryType = settings.get(DISCOVERY_TYPE_KEY, defaultType); + String discoveryType = DISCOVERY_TYPE_SETTING.get(settings); Class discoveryClass = discoveryTypes.get(discoveryType); if (discoveryClass == null) { throw new IllegalArgumentException("Unknown Discovery type [" + discoveryType + "]"); } if (discoveryType.equals("local") == false) { - String masterServiceTypeKey = settings.get(ZEN_MASTER_SERVICE_TYPE_KEY, "zen"); + String masterServiceTypeKey = ZEN_MASTER_SERVICE_TYPE_SETTING.get(settings); final Class masterService = masterServiceType.get(masterServiceTypeKey); if (masterService == null) { throw new IllegalArgumentException("Unknown master service type [" + masterServiceTypeKey + "]"); @@ -121,4 +124,4 @@ public class DiscoveryModule extends AbstractModule { bind(Discovery.class).to(discoveryClass).asEagerSingleton(); bind(DiscoveryService.class).asEagerSingleton(); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java index a82099658ea..22f68a738c7 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import 
org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -39,8 +40,8 @@ import java.util.concurrent.TimeUnit; */ public class DiscoveryService extends AbstractLifecycleComponent { - public static final String SETTING_INITIAL_STATE_TIMEOUT = "discovery.initial_state_timeout"; - public static final String SETTING_DISCOVERY_SEED = "discovery.id.seed"; + public static final Setting INITIAL_STATE_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.initial_state_timeout", TimeValue.timeValueSeconds(30), false, Setting.Scope.CLUSTER); + public static final Setting DISCOVERY_SEED_SETTING = Setting.longSetting("discovery.id.seed", 0l, Long.MIN_VALUE, false, Setting.Scope.CLUSTER); private static class InitialStateListener implements InitialStateDiscoveryListener { @@ -71,7 +72,7 @@ public class DiscoveryService extends AbstractLifecycleComponent implements Discovery, PingContextProvider { public final static Setting REJOIN_ON_MASTER_GONE_SETTING = Setting.boolSetting("discovery.zen.rejoin_on_master_gone", true, true, Setting.Scope.CLUSTER); - public final static String SETTING_PING_TIMEOUT = "discovery.zen.ping_timeout"; - public final static String SETTING_JOIN_TIMEOUT = "discovery.zen.join_timeout"; - public final static String SETTING_JOIN_RETRY_ATTEMPTS = "discovery.zen.join_retry_attempts"; - public final static String SETTING_JOIN_RETRY_DELAY = "discovery.zen.join_retry_delay"; - public final static String SETTING_MAX_PINGS_FROM_ANOTHER_MASTER = "discovery.zen.max_pings_from_another_master"; - public final static String SETTING_SEND_LEAVE_REQUEST = "discovery.zen.send_leave_request"; - public final static String SETTING_MASTER_ELECTION_FILTER_CLIENT = "discovery.zen.master_election.filter_client"; - public final static String SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT = "discovery.zen.master_election.wait_for_joins_timeout"; 
- public final static String SETTING_MASTER_ELECTION_FILTER_DATA = "discovery.zen.master_election.filter_data"; + public final static Setting PING_TIMEOUT_SETTING = Setting.positiveTimeSetting("discovery.zen.ping_timeout", timeValueSeconds(3), false, Setting.Scope.CLUSTER); + public final static Setting JOIN_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.join_timeout", + settings -> TimeValue.timeValueMillis(PING_TIMEOUT_SETTING.get(settings).millis() * 20).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); + public final static Setting JOIN_RETRY_ATTEMPTS_SETTING = Setting.intSetting("discovery.zen.join_retry_attempts", 3, 1, false, Setting.Scope.CLUSTER); + public final static Setting JOIN_RETRY_DELAY_SETTING = Setting.positiveTimeSetting("discovery.zen.join_retry_delay", TimeValue.timeValueMillis(100), false, Setting.Scope.CLUSTER); + public final static Setting MAX_PINGS_FROM_ANOTHER_MASTER_SETTING = Setting.intSetting("discovery.zen.max_pings_from_another_master", 3, 1, false, Setting.Scope.CLUSTER); + public final static Setting SEND_LEAVE_REQUEST_SETTING = Setting.boolSetting("discovery.zen.send_leave_request", true, false, Setting.Scope.CLUSTER); + public final static Setting MASTER_ELECTION_FILTER_CLIENT_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_client", true, false, Setting.Scope.CLUSTER); + public final static Setting MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.master_election.wait_for_joins_timeout", + settings -> TimeValue.timeValueMillis(JOIN_TIMEOUT_SETTING.get(settings).millis() / 2).toString(), TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); + public final static Setting MASTER_ELECTION_FILTER_DATA_SETTING = Setting.boolSetting("discovery.zen.master_election.filter_data", false, false, Setting.Scope.CLUSTER); public static final String DISCOVERY_REJOIN_ACTION_NAME = "internal:discovery/zen/rejoin"; @@ -164,26 +166,19 @@ public class 
ZenDiscovery extends AbstractLifecycleComponent implemen this.discoverySettings = discoverySettings; this.pingService = pingService; this.electMaster = electMasterService; - this.pingTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, timeValueSeconds(3)); + this.pingTimeout = PING_TIMEOUT_SETTING.get(settings); - this.joinTimeout = settings.getAsTime(SETTING_JOIN_TIMEOUT, TimeValue.timeValueMillis(this.pingTimeout.millis() * 20)); - this.joinRetryAttempts = settings.getAsInt(SETTING_JOIN_RETRY_ATTEMPTS, 3); - this.joinRetryDelay = settings.getAsTime(SETTING_JOIN_RETRY_DELAY, TimeValue.timeValueMillis(100)); - this.maxPingsFromAnotherMaster = settings.getAsInt(SETTING_MAX_PINGS_FROM_ANOTHER_MASTER, 3); - this.sendLeaveRequest = settings.getAsBoolean(SETTING_SEND_LEAVE_REQUEST, true); + this.joinTimeout = JOIN_TIMEOUT_SETTING.get(settings); + this.joinRetryAttempts = JOIN_RETRY_ATTEMPTS_SETTING.get(settings); + this.joinRetryDelay = JOIN_RETRY_DELAY_SETTING.get(settings); + this.maxPingsFromAnotherMaster = MAX_PINGS_FROM_ANOTHER_MASTER_SETTING.get(settings); + this.sendLeaveRequest = SEND_LEAVE_REQUEST_SETTING.get(settings); - this.masterElectionFilterClientNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_CLIENT, true); - this.masterElectionFilterDataNodes = settings.getAsBoolean(SETTING_MASTER_ELECTION_FILTER_DATA, false); - this.masterElectionWaitForJoinsTimeout = settings.getAsTime(SETTING_MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT, TimeValue.timeValueMillis(joinTimeout.millis() / 2)); + this.masterElectionFilterClientNodes = MASTER_ELECTION_FILTER_CLIENT_SETTING.get(settings); + this.masterElectionFilterDataNodes = MASTER_ELECTION_FILTER_DATA_SETTING.get(settings); + this.masterElectionWaitForJoinsTimeout = MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING.get(settings); this.rejoinOnMasterGone = REJOIN_ON_MASTER_GONE_SETTING.get(settings); - if (this.joinRetryAttempts < 1) { - throw new IllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be 
a positive number. got [" + SETTING_JOIN_RETRY_ATTEMPTS + "]"); - } - if (this.maxPingsFromAnotherMaster < 1) { - throw new IllegalArgumentException("'" + SETTING_MAX_PINGS_FROM_ANOTHER_MASTER + "' must be a positive number. got [" + this.maxPingsFromAnotherMaster + "]"); - } - logger.debug("using ping_timeout [{}], join.timeout [{}], master_election.filter_client [{}], master_election.filter_data [{}]", this.pingTimeout, joinTimeout, masterElectionFilterClientNodes, masterElectionFilterDataNodes); clusterSettings.addSettingsUpdateConsumer(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, this::handleMinimumMasterNodesChanged, (value) -> { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java index 436ef6bc2b5..62b0250315c 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/fd/FaultDetection.java @@ -21,6 +21,8 @@ package org.elasticsearch.discovery.zen.fd; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -35,11 +37,11 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; */ public abstract class FaultDetection extends AbstractComponent { - public static final String SETTING_CONNECT_ON_NETWORK_DISCONNECT = "discovery.zen.fd.connect_on_network_disconnect"; - public static final String SETTING_PING_INTERVAL = "discovery.zen.fd.ping_interval"; - public static final String SETTING_PING_TIMEOUT = "discovery.zen.fd.ping_timeout"; - public static final String SETTING_PING_RETRIES 
= "discovery.zen.fd.ping_retries"; - public static final String SETTING_REGISTER_CONNECTION_LISTENER = "discovery.zen.fd.register_connection_listener"; + public static final Setting CONNECT_ON_NETWORK_DISCONNECT_SETTING = Setting.boolSetting("discovery.zen.fd.connect_on_network_disconnect", false, false, Scope.CLUSTER); + public static final Setting PING_INTERVAL_SETTING = Setting.positiveTimeSetting("discovery.zen.fd.ping_interval", timeValueSeconds(1), false, Scope.CLUSTER); + public static final Setting PING_TIMEOUT_SETTING = Setting.timeSetting("discovery.zen.fd.ping_timeout", timeValueSeconds(30), false, Scope.CLUSTER); + public static final Setting PING_RETRIES_SETTING = Setting.intSetting("discovery.zen.fd.ping_retries", 3, false, Scope.CLUSTER); + public static final Setting REGISTER_CONNECTION_LISTENER_SETTING = Setting.boolSetting("discovery.zen.fd.register_connection_listener", true, false, Scope.CLUSTER); protected final ThreadPool threadPool; protected final ClusterName clusterName; @@ -60,11 +62,11 @@ public abstract class FaultDetection extends AbstractComponent { this.transportService = transportService; this.clusterName = clusterName; - this.connectOnNetworkDisconnect = settings.getAsBoolean(SETTING_CONNECT_ON_NETWORK_DISCONNECT, false); - this.pingInterval = settings.getAsTime(SETTING_PING_INTERVAL, timeValueSeconds(1)); - this.pingRetryTimeout = settings.getAsTime(SETTING_PING_TIMEOUT, timeValueSeconds(30)); - this.pingRetryCount = settings.getAsInt(SETTING_PING_RETRIES, 3); - this.registerConnectionListener = settings.getAsBoolean(SETTING_REGISTER_CONNECTION_LISTENER, true); + this.connectOnNetworkDisconnect = CONNECT_ON_NETWORK_DISCONNECT_SETTING.get(settings); + this.pingInterval = PING_INTERVAL_SETTING.get(settings); + this.pingRetryTimeout = PING_TIMEOUT_SETTING.get(settings); + this.pingRetryCount = PING_RETRIES_SETTING.get(settings); + this.registerConnectionListener = REGISTER_CONNECTION_LISTENER_SETTING.get(settings); 
this.connectionListener = new FDConnectionListener(); if (registerConnectionListener) { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java index 99feb4b7f72..a9960272d0e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPing.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; @@ -58,6 +59,7 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -72,6 +74,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import static org.elasticsearch.common.unit.TimeValue.readTimeValue; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; @@ -83,7 +86,8 @@ import static org.elasticsearch.discovery.zen.ping.ZenPing.PingResponse.readPing public class UnicastZenPing extends AbstractLifecycleComponent implements ZenPing { public static final String ACTION_NAME = "internal:discovery/zen/unicast"; - public static final String DISCOVERY_ZEN_PING_UNICAST_HOSTS = "discovery.zen.ping.unicast.hosts"; + public static final Setting> 
DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = Setting.listSetting("discovery.zen.ping.unicast.hosts", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, false, Setting.Scope.CLUSTER); // these limits are per-address public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; @@ -135,13 +139,8 @@ public class UnicastZenPing extends AbstractLifecycleComponent implemen } } - this.concurrentConnects = this.settings.getAsInt("discovery.zen.ping.unicast.concurrent_connects", 10); - String[] hostArr = this.settings.getAsArray(DISCOVERY_ZEN_PING_UNICAST_HOSTS); - // trim the hosts - for (int i = 0; i < hostArr.length; i++) { - hostArr[i] = hostArr[i].trim(); - } - List hosts = CollectionUtils.arrayAsArrayList(hostArr); + this.concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); + List hosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); final int limitPortCounts; if (hosts.isEmpty()) { // if unicast hosts are not specified, fill with simple defaults on the local machine diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index b6453a4707b..65d62bd9e33 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import java.io.IOException; @@ -33,6 +34,9 @@ import java.nio.file.FileStore; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collections; 
+import java.util.List; +import java.util.function.Function; import static org.elasticsearch.common.Strings.cleanPath; @@ -43,6 +47,15 @@ import static org.elasticsearch.common.Strings.cleanPath; // TODO: move PathUtils to be package-private here instead of // public+forbidden api! public class Environment { + public static final Setting PATH_HOME_SETTING = Setting.simpleString("path.home", false, Setting.Scope.CLUSTER); + public static final Setting PATH_CONF_SETTING = Setting.simpleString("path.conf", false, Setting.Scope.CLUSTER); + public static final Setting PATH_SCRIPTS_SETTING = Setting.simpleString("path.scripts", false, Setting.Scope.CLUSTER); + public static final Setting> PATH_DATA_SETTING = Setting.listSetting("path.data", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting PATH_LOGS_SETTING = Setting.simpleString("path.logs", false, Setting.Scope.CLUSTER); + public static final Setting PATH_PLUGINS_SETTING = Setting.simpleString("path.plugins", false, Setting.Scope.CLUSTER); + public static final Setting> PATH_REPO_SETTING = Setting.listSetting("path.repo", Collections.emptyList(), Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting PATH_SHARED_DATA_SETTING = Setting.simpleString("path.shared_data", false, Setting.Scope.CLUSTER); + public static final Setting PIDFILE_SETTING = Setting.simpleString("pidfile", false, Setting.Scope.CLUSTER); private final Settings settings; @@ -95,64 +108,64 @@ public class Environment { public Environment(Settings settings) { this.settings = settings; final Path homeFile; - if (settings.get("path.home") != null) { - homeFile = PathUtils.get(cleanPath(settings.get("path.home"))); + if (PATH_HOME_SETTING.exists(settings)) { + homeFile = PathUtils.get(cleanPath(PATH_HOME_SETTING.get(settings))); } else { - throw new IllegalStateException("path.home is not configured"); + throw new IllegalStateException(PATH_HOME_SETTING.getKey() + " is not 
configured"); } - if (settings.get("path.conf") != null) { - configFile = PathUtils.get(cleanPath(settings.get("path.conf"))); + if (PATH_CONF_SETTING.exists(settings)) { + configFile = PathUtils.get(cleanPath(PATH_CONF_SETTING.get(settings))); } else { configFile = homeFile.resolve("config"); } - if (settings.get("path.scripts") != null) { - scriptsFile = PathUtils.get(cleanPath(settings.get("path.scripts"))); + if (PATH_SCRIPTS_SETTING.exists(settings)) { + scriptsFile = PathUtils.get(cleanPath(PATH_SCRIPTS_SETTING.get(settings))); } else { scriptsFile = configFile.resolve("scripts"); } - if (settings.get("path.plugins") != null) { - pluginsFile = PathUtils.get(cleanPath(settings.get("path.plugins"))); + if (PATH_PLUGINS_SETTING.exists(settings)) { + pluginsFile = PathUtils.get(cleanPath(PATH_PLUGINS_SETTING.get(settings))); } else { pluginsFile = homeFile.resolve("plugins"); } - String[] dataPaths = settings.getAsArray("path.data"); - if (dataPaths.length > 0) { - dataFiles = new Path[dataPaths.length]; - dataWithClusterFiles = new Path[dataPaths.length]; - for (int i = 0; i < dataPaths.length; i++) { - dataFiles[i] = PathUtils.get(dataPaths[i]); + List dataPaths = PATH_DATA_SETTING.get(settings); + if (dataPaths.isEmpty() == false) { + dataFiles = new Path[dataPaths.size()]; + dataWithClusterFiles = new Path[dataPaths.size()]; + for (int i = 0; i < dataPaths.size(); i++) { + dataFiles[i] = PathUtils.get(dataPaths.get(i)); dataWithClusterFiles[i] = dataFiles[i].resolve(ClusterName.clusterNameFromSettings(settings).value()); } } else { dataFiles = new Path[]{homeFile.resolve("data")}; dataWithClusterFiles = new Path[]{homeFile.resolve("data").resolve(ClusterName.clusterNameFromSettings(settings).value())}; } - if (settings.get("path.shared_data") != null) { - sharedDataFile = PathUtils.get(cleanPath(settings.get("path.shared_data"))); + if (PATH_SHARED_DATA_SETTING.exists(settings)) { + sharedDataFile = 
PathUtils.get(cleanPath(PATH_SHARED_DATA_SETTING.get(settings))); } else { sharedDataFile = null; } - String[] repoPaths = settings.getAsArray("path.repo"); - if (repoPaths.length > 0) { - repoFiles = new Path[repoPaths.length]; - for (int i = 0; i < repoPaths.length; i++) { - repoFiles[i] = PathUtils.get(repoPaths[i]); + List repoPaths = PATH_REPO_SETTING.get(settings); + if (repoPaths.isEmpty() == false) { + repoFiles = new Path[repoPaths.size()]; + for (int i = 0; i < repoPaths.size(); i++) { + repoFiles[i] = PathUtils.get(repoPaths.get(i)); } } else { repoFiles = new Path[0]; } - if (settings.get("path.logs") != null) { - logsFile = PathUtils.get(cleanPath(settings.get("path.logs"))); + if (PATH_LOGS_SETTING.exists(settings)) { + logsFile = PathUtils.get(cleanPath(PATH_LOGS_SETTING.get(settings))); } else { logsFile = homeFile.resolve("logs"); } - if (settings.get("pidfile") != null) { - pidFile = PathUtils.get(cleanPath(settings.get("pidfile"))); + if (PIDFILE_SETTING.exists(settings)) { + pidFile = PathUtils.get(cleanPath(PIDFILE_SETTING.get(settings))); } else { pidFile = null; } diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 93e95dfaa96..658a48ecc87 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -342,7 +342,7 @@ public class NodeEnvironment extends AbstractComponent implements Closeable { // resolve the directory the shard actually lives in Path p = shardPaths[i].resolve("index"); // open a directory (will be immediately closed) on the shard's location - dirs[i] = new SimpleFSDirectory(p, FsDirectoryService.buildLockFactory(indexSettings)); + dirs[i] = new SimpleFSDirectory(p, indexSettings.getValue(FsDirectoryService.INDEX_LOCK_FACTOR_SETTING)); // create a lock for the "write.lock" file try { locks[i] = dirs[i].obtainLock(IndexWriter.WRITE_LOCK_NAME); diff --git 
a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 117a0c6959b..c6a65ff082c 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.MultiDataPathUpgrader; import org.elasticsearch.env.NodeEnvironment; import java.nio.file.DirectoryStream; @@ -77,7 +76,6 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL if (DiscoveryNode.dataNode(settings)) { ensureNoPre019ShardState(nodeEnv); - MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger); } if (DiscoveryNode.masterNode(settings) || DiscoveryNode.dataNode(settings)) { diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 80e3be78093..af565a6002b 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.DiscoveryService; @@ -49,6 +50,21 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class GatewayService extends AbstractLifecycleComponent implements ClusterStateListener { + public static 
final Setting EXPECTED_NODES_SETTING = Setting.intSetting( + "gateway.expected_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting EXPECTED_DATA_NODES_SETTING = Setting.intSetting( + "gateway.expected_data_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting EXPECTED_MASTER_NODES_SETTING = Setting.intSetting( + "gateway.expected_master_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_TIME_SETTING = Setting.positiveTimeSetting( + "gateway.recover_after_time", TimeValue.timeValueMillis(0), false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_NODES_SETTING = Setting.intSetting( + "gateway.recover_after_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_DATA_NODES_SETTING = Setting.intSetting( + "gateway.recover_after_data_nodes", -1, -1, false, Setting.Scope.CLUSTER); + public static final Setting RECOVER_AFTER_MASTER_NODES_SETTING = Setting.intSetting( + "gateway.recover_after_master_nodes", 0, 0, false, Setting.Scope.CLUSTER); + public static final ClusterBlock STATE_NOT_RECOVERED_BLOCK = new ClusterBlock(1, "state not recovered / initialized", true, true, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL); public static final TimeValue DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET = TimeValue.timeValueMinutes(5); @@ -84,20 +100,26 @@ public class GatewayService extends AbstractLifecycleComponent i this.discoveryService = discoveryService; this.threadPool = threadPool; // allow to control a delay of when indices will get created - this.expectedNodes = this.settings.getAsInt("gateway.expected_nodes", -1); - this.expectedDataNodes = this.settings.getAsInt("gateway.expected_data_nodes", -1); - this.expectedMasterNodes = this.settings.getAsInt("gateway.expected_master_nodes", -1); + this.expectedNodes = EXPECTED_NODES_SETTING.get(this.settings); + this.expectedDataNodes = 
EXPECTED_DATA_NODES_SETTING.get(this.settings); + this.expectedMasterNodes = EXPECTED_MASTER_NODES_SETTING.get(this.settings); - TimeValue defaultRecoverAfterTime = null; - if (expectedNodes >= 0 || expectedDataNodes >= 0 || expectedMasterNodes >= 0) { - defaultRecoverAfterTime = DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET; + if (RECOVER_AFTER_TIME_SETTING.exists(this.settings)) { + recoverAfterTime = RECOVER_AFTER_TIME_SETTING.get(this.settings); + } else if (expectedNodes >= 0 || expectedDataNodes >= 0 || expectedMasterNodes >= 0) { + recoverAfterTime = DEFAULT_RECOVER_AFTER_TIME_IF_EXPECTED_NODES_IS_SET; + } else { + recoverAfterTime = null; } - - this.recoverAfterTime = this.settings.getAsTime("gateway.recover_after_time", defaultRecoverAfterTime); - this.recoverAfterNodes = this.settings.getAsInt("gateway.recover_after_nodes", -1); - this.recoverAfterDataNodes = this.settings.getAsInt("gateway.recover_after_data_nodes", -1); + this.recoverAfterNodes = RECOVER_AFTER_NODES_SETTING.get(this.settings); + this.recoverAfterDataNodes = RECOVER_AFTER_DATA_NODES_SETTING.get(this.settings); // default the recover after master nodes to the minimum master nodes in the discovery - this.recoverAfterMasterNodes = this.settings.getAsInt("gateway.recover_after_master_nodes", settings.getAsInt("discovery.zen.minimum_master_nodes", -1)); + if (RECOVER_AFTER_MASTER_NODES_SETTING.exists(this.settings)) { + recoverAfterMasterNodes = RECOVER_AFTER_MASTER_NODES_SETTING.get(this.settings); + } else { + // TODO: change me once the minimum_master_nodes is changed too + recoverAfterMasterNodes = settings.getAsInt("discovery.zen.minimum_master_nodes", -1); + } // Add the not recovered as initial state block, we don't allow anything until this.clusterService.addInitialStateBlock(STATE_NOT_RECOVERED_BLOCK); diff --git a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java index 
83eaa791485..3d3a0e3b59c 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PrimaryShardAllocator.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; @@ -37,9 +38,11 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -48,15 +51,30 @@ import java.util.stream.Collectors; */ public abstract class PrimaryShardAllocator extends AbstractComponent { - @Deprecated - public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards"; + private static final Function INITIAL_SHARDS_PARSER = (value) -> { + switch (value) { + case "quorum": + case "quorum-1": + case "half": + case "one": + case "full": + case "full-1": + case "all-1": + case "all": + return value; + default: + Integer.parseInt(value); // it can be parsed that's all we care here? 
+ return value; + } + }; - private final String initialShards; + public static final Setting NODE_INITIAL_SHARDS_SETTING = new Setting<>("gateway.initial_shards", (settings) -> settings.get("gateway.local.initial_shards", "quorum"), INITIAL_SHARDS_PARSER, true, Setting.Scope.CLUSTER); + @Deprecated + public static final Setting INDEX_RECOVERY_INITIAL_SHARDS_SETTING = new Setting<>("index.recovery.initial_shards", (settings) -> NODE_INITIAL_SHARDS_SETTING.get(settings) , INITIAL_SHARDS_PARSER, true, Setting.Scope.INDEX); public PrimaryShardAllocator(Settings settings) { super(settings); - this.initialShards = settings.get("gateway.initial_shards", settings.get("gateway.local.initial_shards", "quorum")); - logger.debug("using initial_shards [{}]", initialShards); + logger.debug("using initial_shards [{}]", NODE_INITIAL_SHARDS_SETTING.get(settings)); } public boolean allocateUnassigned(RoutingAllocation allocation) { @@ -73,8 +91,8 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { } final IndexMetaData indexMetaData = metaData.index(shard.getIndex()); - final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings, Collections.emptyList()); - + // don't go wild here and create a new IndexSetting object for every shard this could cause a lot of garbage + // on cluster restart if we allocate a boat load of shards if (shard.allocatedPostIndexCreate(indexMetaData) == false) { // when we create a fresh index continue; @@ -90,13 +108,13 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { final Set lastActiveAllocationIds = indexMetaData.activeAllocationIds(shard.id()); final boolean snapshotRestore = shard.restoreSource() != null; - final boolean recoverOnAnyNode = recoverOnAnyNode(indexSettings); + final boolean recoverOnAnyNode = recoverOnAnyNode(indexMetaData); final NodesAndVersions nodesAndVersions; final boolean enoughAllocationsFound; if (lastActiveAllocationIds.isEmpty()) { - assert 
indexSettings.getIndexVersionCreated().before(Version.V_3_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new"; + assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_3_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new"; // when we load an old index (after upgrading cluster) or restore a snapshot of an old index // fall back to old version-based allocation mode // Note that once the shard has been active, lastActiveAllocationIds will be non-empty @@ -158,8 +176,8 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { */ protected NodesAndVersions buildAllocationIdBasedNodes(ShardRouting shard, boolean matchAnyShard, Set ignoreNodes, Set lastActiveAllocationIds, AsyncShardFetch.FetchResult shardState) { - List matchingNodes = new ArrayList<>(); - List nonMatchingNodes = new ArrayList<>(); + LinkedList matchingNodes = new LinkedList<>(); + LinkedList nonMatchingNodes = new LinkedList<>(); long highestVersion = -1; for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) { DiscoveryNode node = nodeShardState.getNode(); @@ -183,10 +201,18 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { if (allocationId != null) { if (lastActiveAllocationIds.contains(allocationId)) { - matchingNodes.add(node); + if (nodeShardState.primary()) { + matchingNodes.addFirst(node); + } else { + matchingNodes.addLast(node); + } highestVersion = Math.max(highestVersion, nodeShardState.version()); } else if (matchAnyShard) { - nonMatchingNodes.add(node); + if (nodeShardState.primary()) { + nonMatchingNodes.addFirst(node); + } else { + nonMatchingNodes.addLast(node); + } highestVersion = Math.max(highestVersion, nodeShardState.version()); } } @@ -209,29 +235,25 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { // check if the counts meets the minimum set int 
requiredAllocation = 1; // if we restore from a repository one copy is more then enough - try { - String initialShards = indexMetaData.getSettings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards)); - if ("quorum".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 1) { - requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1; - } - } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 2) { - requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2); - } - } else if ("one".equals(initialShards)) { - requiredAllocation = 1; - } else if ("full".equals(initialShards) || "all".equals(initialShards)) { - requiredAllocation = indexMetaData.getNumberOfReplicas() + 1; - } else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) { - if (indexMetaData.getNumberOfReplicas() > 1) { - requiredAllocation = indexMetaData.getNumberOfReplicas(); - } - } else { - requiredAllocation = Integer.parseInt(initialShards); + String initialShards = INDEX_RECOVERY_INITIAL_SHARDS_SETTING.get(indexMetaData.getSettings(), settings); + if ("quorum".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 1) { + requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2) + 1; } - } catch (Exception e) { - logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard); + } else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 2) { + requiredAllocation = ((1 + indexMetaData.getNumberOfReplicas()) / 2); + } + } else if ("one".equals(initialShards)) { + requiredAllocation = 1; + } else if ("full".equals(initialShards) || "all".equals(initialShards)) { + requiredAllocation = indexMetaData.getNumberOfReplicas() + 1; + } else if 
("full-1".equals(initialShards) || "all-1".equals(initialShards)) { + if (indexMetaData.getNumberOfReplicas() > 1) { + requiredAllocation = indexMetaData.getNumberOfReplicas(); + } + } else { + requiredAllocation = Integer.parseInt(initialShards); } return nodesAndVersions.allocationsFound >= requiredAllocation; @@ -334,9 +356,9 @@ public abstract class PrimaryShardAllocator extends AbstractComponent { * Return {@code true} if the index is configured to allow shards to be * recovered on any node */ - private boolean recoverOnAnyNode(IndexSettings indexSettings) { - return indexSettings.isOnSharedFilesystem() - && indexSettings.getSettings().getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false); + private boolean recoverOnAnyNode(IndexMetaData metaData) { + return (IndexMetaData.isOnSharedFilesystem(metaData.getSettings()) || IndexMetaData.isOnSharedFilesystem(this.settings)) + && IndexMetaData.INDEX_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE_SETTING.get(metaData.getSettings(), this.settings); } protected abstract AsyncShardFetch.FetchResult fetchData(ShardRouting shard, RoutingAllocation allocation); diff --git a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java index 4f70bf41488..c5c5794a788 100644 --- a/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java +++ b/core/src/main/java/org/elasticsearch/gateway/PriorityComparator.java @@ -56,7 +56,7 @@ public abstract class PriorityComparator implements Comparator { } private int priority(Settings settings) { - return settings.getAsInt(IndexMetaData.SETTING_PRIORITY, 1); + return IndexMetaData.INDEX_PRIORITY_SETTING.get(settings); } private long timeCreated(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 
27ee0c17dab..6768221a044 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -130,7 +130,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction if (metaData != null) { ShardPath shardPath = null; try { - IndexSettings indexSettings = new IndexSettings(metaData, settings, Collections.emptyList()); + IndexSettings indexSettings = new IndexSettings(metaData, settings); shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, indexSettings); if (shardPath == null) { throw new IllegalStateException(shardId + " no shard path found"); @@ -139,7 +139,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction } catch (Exception exception) { logger.trace("{} can't open index for shard [{}] in path [{}]", exception, shardId, shardStateMetaData, (shardPath != null) ? shardPath.resolveIndex() : ""); String allocationId = shardStateMetaData.allocationId != null ? shardStateMetaData.allocationId.getId() : null; - return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, exception); + return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, shardStateMetaData.primary, exception); } } // old shard metadata doesn't have the actual index UUID so we need to check if the actual uuid in the metadata @@ -150,11 +150,11 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction } else { logger.debug("{} shard state info found: [{}]", shardId, shardStateMetaData); String allocationId = shardStateMetaData.allocationId != null ? 
shardStateMetaData.allocationId.getId() : null; - return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId); + return new NodeGatewayStartedShards(clusterService.localNode(), shardStateMetaData.version, allocationId, shardStateMetaData.primary); } } logger.trace("{} no local shard info found", shardId); - return new NodeGatewayStartedShards(clusterService.localNode(), -1, null); + return new NodeGatewayStartedShards(clusterService.localNode(), -1, null, false); } catch (Exception e) { throw new ElasticsearchException("failed to load started shards", e); } @@ -279,18 +279,20 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction private long version = -1; private String allocationId = null; + private boolean primary = false; private Throwable storeException = null; public NodeGatewayStartedShards() { } - public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId) { - this(node, version, allocationId, null); + public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, boolean primary) { + this(node, version, allocationId, primary, null); } - public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, Throwable storeException) { + public NodeGatewayStartedShards(DiscoveryNode node, long version, String allocationId, boolean primary, Throwable storeException) { super(node); this.version = version; this.allocationId = allocationId; + this.primary = primary; this.storeException = storeException; } @@ -302,6 +304,10 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction return this.allocationId; } + public boolean primary() { + return this.primary; + } + public Throwable storeException() { return this.storeException; } @@ -311,6 +317,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction super.readFrom(in); version = in.readLong(); allocationId = 
in.readOptionalString(); + primary = in.readBoolean(); if (in.readBoolean()) { storeException = in.readThrowable(); } @@ -321,6 +328,7 @@ public class TransportNodesListGatewayStartedShards extends TransportNodesAction super.writeTo(out); out.writeLong(version); out.writeOptionalString(allocationId); + out.writeBoolean(primary); if (storeException != null) { out.writeBoolean(true); out.writeThrowable(storeException); diff --git a/core/src/main/java/org/elasticsearch/http/HttpServer.java b/core/src/main/java/org/elasticsearch/http/HttpServer.java index 9971ce7722d..35c46f4f794 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServer.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServer.java @@ -51,7 +51,7 @@ import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; /** - * + * A component to serve http requests, backed by rest handlers. */ public class HttpServer extends AbstractLifecycleComponent { @@ -63,10 +63,6 @@ public class HttpServer extends AbstractLifecycleComponent { private final NodeService nodeService; - private final boolean disableSites; - - private final PluginSiteFilter pluginSiteFilter = new PluginSiteFilter(); - @Inject public HttpServer(Settings settings, Environment environment, HttpServerTransport transport, RestController restController, @@ -77,9 +73,6 @@ public class HttpServer extends AbstractLifecycleComponent { this.restController = restController; this.nodeService = nodeService; nodeService.setHttpServer(this); - - this.disableSites = this.settings.getAsBoolean("http.disable_sites", false); - transport.httpServerAdapter(new Dispatcher(this)); } @@ -126,27 +119,13 @@ public class HttpServer extends AbstractLifecycleComponent { } public void internalDispatchRequest(final HttpRequest request, final HttpChannel channel) { - String rawPath = request.rawPath(); - if (rawPath.startsWith("/_plugin/")) { - RestFilterChain filterChain = 
restController.filterChain(pluginSiteFilter); - filterChain.continueProcessing(request, channel); - return; - } else if (rawPath.equals("/favicon.ico")) { + if (request.rawPath().equals("/favicon.ico")) { handleFavicon(request, channel); return; } restController.dispatchRequest(request, channel); } - - class PluginSiteFilter extends RestFilter { - - @Override - public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws IOException { - handlePluginSite((HttpRequest) request, (HttpChannel) channel); - } - } - void handleFavicon(HttpRequest request, HttpChannel channel) { if (request.method() == RestRequest.Method.GET) { try { @@ -163,129 +142,4 @@ public class HttpServer extends AbstractLifecycleComponent { channel.sendResponse(new BytesRestResponse(FORBIDDEN)); } } - - void handlePluginSite(HttpRequest request, HttpChannel channel) throws IOException { - if (disableSites) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - if (request.method() == RestRequest.Method.OPTIONS) { - // when we have OPTIONS request, simply send OK by default (with the Access Control Origin header which gets automatically added) - channel.sendResponse(new BytesRestResponse(OK)); - return; - } - if (request.method() != RestRequest.Method.GET) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - // TODO for a "/_plugin" endpoint, we should have a page that lists all the plugins? 
- - String path = request.rawPath().substring("/_plugin/".length()); - int i1 = path.indexOf('/'); - String pluginName; - String sitePath; - if (i1 == -1) { - pluginName = path; - sitePath = null; - // If a trailing / is missing, we redirect to the right page #2654 - String redirectUrl = request.rawPath() + "/"; - BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", ""); - restResponse.addHeader("Location", redirectUrl); - channel.sendResponse(restResponse); - return; - } else { - pluginName = path.substring(0, i1); - sitePath = path.substring(i1 + 1); - } - - // we default to index.html, or what the plugin provides (as a unix-style path) - // this is a relative path under _site configured by the plugin. - if (sitePath.length() == 0) { - sitePath = "index.html"; - } else { - // remove extraneous leading slashes, its not an absolute path. - while (sitePath.length() > 0 && sitePath.charAt(0) == '/') { - sitePath = sitePath.substring(1); - } - } - final Path siteFile = environment.pluginsFile().resolve(pluginName).resolve("_site"); - - final String separator = siteFile.getFileSystem().getSeparator(); - // Convert file separators. 
- sitePath = sitePath.replace("/", separator); - - Path file = siteFile.resolve(sitePath); - - // return not found instead of forbidden to prevent malicious requests to find out if files exist or dont exist - if (!Files.exists(file) || FileSystemUtils.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath().normalize())) { - channel.sendResponse(new BytesRestResponse(NOT_FOUND)); - return; - } - - BasicFileAttributes attributes = Files.readAttributes(file, BasicFileAttributes.class); - if (!attributes.isRegularFile()) { - // If it's not a dir, we send a 403 - if (!attributes.isDirectory()) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - // We don't serve dir but if index.html exists in dir we should serve it - file = file.resolve("index.html"); - if (!Files.exists(file) || FileSystemUtils.isHidden(file) || !Files.isRegularFile(file)) { - channel.sendResponse(new BytesRestResponse(FORBIDDEN)); - return; - } - } - - try { - byte[] data = Files.readAllBytes(file); - channel.sendResponse(new BytesRestResponse(OK, guessMimeType(sitePath), data)); - } catch (IOException e) { - channel.sendResponse(new BytesRestResponse(INTERNAL_SERVER_ERROR)); - } - } - - - // TODO: Don't respond with a mime type that violates the request's Accept header - private String guessMimeType(String path) { - int lastDot = path.lastIndexOf('.'); - if (lastDot == -1) { - return ""; - } - String extension = path.substring(lastDot + 1).toLowerCase(Locale.ROOT); - String mimeType = DEFAULT_MIME_TYPES.get(extension); - if (mimeType == null) { - return ""; - } - return mimeType; - } - - static { - // This is not an exhaustive list, just the most common types. Call registerMimeType() to add more. 
- Map mimeTypes = new HashMap<>(); - mimeTypes.put("txt", "text/plain"); - mimeTypes.put("css", "text/css"); - mimeTypes.put("csv", "text/csv"); - mimeTypes.put("htm", "text/html"); - mimeTypes.put("html", "text/html"); - mimeTypes.put("xml", "text/xml"); - mimeTypes.put("js", "text/javascript"); // Technically it should be application/javascript (RFC 4329), but IE8 struggles with that - mimeTypes.put("xhtml", "application/xhtml+xml"); - mimeTypes.put("json", "application/json"); - mimeTypes.put("pdf", "application/pdf"); - mimeTypes.put("zip", "application/zip"); - mimeTypes.put("tar", "application/x-tar"); - mimeTypes.put("gif", "image/gif"); - mimeTypes.put("jpeg", "image/jpeg"); - mimeTypes.put("jpg", "image/jpeg"); - mimeTypes.put("tiff", "image/tiff"); - mimeTypes.put("tif", "image/tiff"); - mimeTypes.put("png", "image/png"); - mimeTypes.put("svg", "image/svg+xml"); - mimeTypes.put("ico", "image/vnd.microsoft.icon"); - mimeTypes.put("mp3", "audio/mpeg"); - DEFAULT_MIME_TYPES = unmodifiableMap(mimeTypes); - } - - public static final Map DEFAULT_MIME_TYPES; } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java index 7fcc7b65fba..316799dd062 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java @@ -113,7 +113,7 @@ public class NettyHttpChannel extends HttpChannel { resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status); } if (RestUtils.isBrowser(nettyRequest.headers().get(USER_AGENT))) { - if (transport.settings().getAsBoolean(SETTING_CORS_ENABLED, false)) { + if (SETTING_CORS_ENABLED.get(transport.settings())) { String originHeader = request.header(ORIGIN); if (!Strings.isNullOrEmpty(originHeader)) { if (corsPattern == null) { @@ -127,12 +127,12 @@ public class NettyHttpChannel extends HttpChannel { } if (nettyRequest.getMethod() == 
HttpMethod.OPTIONS) { // Allow Ajax requests based on the CORS "preflight" request - resp.headers().add(ACCESS_CONTROL_MAX_AGE, transport.settings().getAsInt(SETTING_CORS_MAX_AGE, 1728000)); + resp.headers().add(ACCESS_CONTROL_MAX_AGE, SETTING_CORS_MAX_AGE.get(transport.settings())); resp.headers().add(ACCESS_CONTROL_ALLOW_METHODS, transport.settings().get(SETTING_CORS_ALLOW_METHODS, "OPTIONS, HEAD, GET, POST, PUT, DELETE")); resp.headers().add(ACCESS_CONTROL_ALLOW_HEADERS, transport.settings().get(SETTING_CORS_ALLOW_HEADERS, "X-Requested-With, Content-Type, Content-Length")); } - if (transport.settings().getAsBoolean(SETTING_CORS_ALLOW_CREDENTIALS, false)) { + if (SETTING_CORS_ALLOW_CREDENTIALS.get(transport.settings())) { resp.headers().add(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true"); } } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java index 899bbdc86e2..0cd0cef336c 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java @@ -19,7 +19,6 @@ package org.elasticsearch.http.netty; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; @@ -27,7 +26,8 @@ import org.elasticsearch.common.netty.NettyUtils; import org.elasticsearch.common.netty.OpenChannelsHandler; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.network.NetworkUtils; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Scope; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import 
org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -75,9 +75,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE; @@ -94,19 +91,19 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent SETTING_CORS_ENABLED = Setting.boolSetting("http.cors.enabled", false, false, Scope.CLUSTER); public static final String SETTING_CORS_ALLOW_ORIGIN = "http.cors.allow-origin"; - public static final String SETTING_CORS_MAX_AGE = "http.cors.max-age"; + public static final Setting SETTING_CORS_MAX_AGE = Setting.intSetting("http.cors.max-age", 1728000, false, Scope.CLUSTER); public static final String SETTING_CORS_ALLOW_METHODS = "http.cors.allow-methods"; public static final String SETTING_CORS_ALLOW_HEADERS = "http.cors.allow-headers"; - public static final String SETTING_CORS_ALLOW_CREDENTIALS = "http.cors.allow-credentials"; - public static final String SETTING_PIPELINING = "http.pipelining"; + public static final Setting SETTING_CORS_ALLOW_CREDENTIALS = Setting.boolSetting("http.cors.allow-credentials", false, false, Scope.CLUSTER); + + public static final Setting SETTING_PIPELINING = Setting.boolSetting("http.pipelining", true, false, Scope.CLUSTER); public static final String SETTING_PIPELINING_MAX_EVENTS = "http.pipelining.max_events"; 
public static final String SETTING_HTTP_COMPRESSION = "http.compression"; public static final String SETTING_HTTP_COMPRESSION_LEVEL = "http.compression_level"; - public static final String SETTING_HTTP_DETAILED_ERRORS_ENABLED = "http.detailed_errors.enabled"; + public static final Setting SETTING_HTTP_DETAILED_ERRORS_ENABLED = Setting.boolSetting("http.detailed_errors.enabled", true, false, Scope.CLUSTER); - public static final boolean DEFAULT_SETTING_PIPELINING = true; public static final int DEFAULT_SETTING_PIPELINING_MAX_EVENTS = 10000; public static final String DEFAULT_PORT_RANGE = "9200-9300"; @@ -142,8 +139,8 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent 0) { @@ -216,7 +213,7 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent 0) { + serverBootstrap.setOption("child.tcpNoDelay", tcpNoDelay); + serverBootstrap.setOption("child.keepAlive", tcpKeepAlive); + if (tcpSendBufferSize.bytes() > 0) { + serverBootstrap.setOption("child.sendBufferSize", tcpSendBufferSize.bytes()); } - if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) { + if (tcpReceiveBufferSize.bytes() > 0) { serverBootstrap.setOption("child.receiveBufferSize", tcpReceiveBufferSize.bytes()); } serverBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory); @@ -308,7 +302,8 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent"index.similarity.my_similarity.type : "BM25" can be used. *
  • {@link IndexStore} - Custom {@link IndexStore} instances can be registered via {@link #addIndexStore(String, BiFunction)}
  • *
  • {@link IndexEventListener} - Custom {@link IndexEventListener} instances can be registered via {@link #addIndexEventListener(IndexEventListener)}
  • - *
  • Settings update listener - Custom settings update listener can be registered via {@link #addIndexSettingsListener(Consumer)}
  • + *
  • Settings update listener - Custom settings update listener can be registered via {@link #addSettingsUpdateConsumer(Setting, Consumer)}
  • * */ public final class IndexModule { - public static final String STORE_TYPE = "index.store.type"; + public static final Setting INDEX_STORE_TYPE_SETTING = new Setting<>("index.store.type", "", Function.identity(), false, Setting.Scope.INDEX); public static final String SIMILARITY_SETTINGS_PREFIX = "index.similarity"; public static final String INDEX_QUERY_CACHE = "index"; public static final String NONE_QUERY_CACHE = "none"; - public static final String QUERY_CACHE_TYPE = "index.queries.cache.type"; + public static final Setting INDEX_QUERY_CACHE_TYPE_SETTING = new Setting<>("index.queries.cache.type", INDEX_QUERY_CACHE, Function.identity(), false, Setting.Scope.INDEX); // for test purposes only - public static final String QUERY_CACHE_EVERYTHING = "index.queries.cache.everything"; + public static final Setting INDEX_QUERY_CACHE_EVERYTHING_SETTING = Setting.boolSetting("index.queries.cache.everything", false, false, Setting.Scope.INDEX); private final IndexSettings indexSettings; private final IndexStoreConfig indexStoreConfig; private final AnalysisRegistry analysisRegistry; // pkg private so tests can mock final SetOnce engineFactory = new SetOnce<>(); private SetOnce indexSearcherWrapper = new SetOnce<>(); - private final Set> settingsConsumers = new HashSet<>(); private final Set indexEventListeners = new HashSet<>(); private IndexEventListener listener; private final Map> similarities = new HashMap<>(); @@ -92,17 +93,13 @@ public final class IndexModule { } /** - * Adds a settings consumer for this index + * Adds a Setting and it's consumer for this index. 
*/ - public void addIndexSettingsListener(Consumer listener) { - if (listener == null) { - throw new IllegalArgumentException("listener must not be null"); + public void addSettingsUpdateConsumer(Setting setting, Consumer consumer) { + if (setting == null) { + throw new IllegalArgumentException("setting must not be null"); } - - if (settingsConsumers.contains(listener)) { - throw new IllegalStateException("listener already registered"); - } - settingsConsumers.add(listener); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(setting, consumer); } /** @@ -245,27 +242,29 @@ public final class IndexModule { public IndexService newIndexService(NodeEnvironment environment, IndexService.ShardStoreDeleter shardStoreDeleter, NodeServicesProvider servicesProvider, MapperRegistry mapperRegistry, IndexingOperationListener... listeners) throws IOException { - final IndexSettings settings = indexSettings.newWithListener(settingsConsumers); IndexSearcherWrapperFactory searcherWrapperFactory = indexSearcherWrapper.get() == null ? 
(shard) -> null : indexSearcherWrapper.get(); IndexEventListener eventListener = freeze(); - final String storeType = settings.getSettings().get(STORE_TYPE); + final String storeType = indexSettings.getValue(INDEX_STORE_TYPE_SETTING); final IndexStore store; - if (storeType == null || isBuiltinType(storeType)) { - store = new IndexStore(settings, indexStoreConfig); + if (Strings.isEmpty(storeType) || isBuiltinType(storeType)) { + store = new IndexStore(indexSettings, indexStoreConfig); } else { BiFunction factory = storeTypes.get(storeType); if (factory == null) { throw new IllegalArgumentException("Unknown store type [" + storeType + "]"); } - store = factory.apply(settings, indexStoreConfig); + store = factory.apply(indexSettings, indexStoreConfig); if (store == null) { throw new IllegalStateException("store must not be null"); } } - final String queryCacheType = settings.getSettings().get(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING, store::setType); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, store::setMaxRate); + final String queryCacheType = indexSettings.getValue(INDEX_QUERY_CACHE_TYPE_SETTING); final BiFunction queryCacheProvider = queryCaches.get(queryCacheType); - final QueryCache queryCache = queryCacheProvider.apply(settings, servicesProvider.getIndicesQueryCache()); - return new IndexService(settings, environment, new SimilarityService(settings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(), + final QueryCache queryCache = queryCacheProvider.apply(indexSettings, servicesProvider.getIndicesQueryCache()); + return new IndexService(indexSettings, environment, new SimilarityService(indexSettings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(), servicesProvider, queryCache, store, eventListener, 
searcherWrapperFactory, mapperRegistry, listeners); } + } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 7d82e167c42..cbbdaab10c8 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -25,6 +25,7 @@ import java.nio.file.Path; import java.util.HashMap; import java.util.Iterator; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; @@ -61,7 +62,6 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.search.stats.SearchSlowLog; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.IndexShard; @@ -140,7 +140,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.engineFactory = engineFactory; // initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE this.searcherWrapper = wrapperFactory.newWrapper(this); - this.slowLog = new IndexingSlowLog(indexSettings.getSettings()); + this.slowLog = new IndexingSlowLog(indexSettings); // Add our slowLog to the incoming IndexingOperationListeners: this.listeners = new IndexingOperationListener[1+listenersIn.length]; @@ -153,7 +153,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC this.fsyncTask = null; } this.refreshTask = new AsyncRefreshTask(this); - searchSlowLog = new SearchSlowLog(indexSettings.getSettings()); + searchSlowLog = new SearchSlowLog(indexSettings); } public int numberOfShards() { @@ -566,7 +566,6 @@ public final 
class IndexService extends AbstractIndexComponent implements IndexC public synchronized void updateMetaData(final IndexMetaData metadata) { if (indexSettings.updateIndexMetaData(metadata)) { - final Settings settings = indexSettings.getSettings(); for (final IndexShard shard : this.shards.values()) { try { shard.onSettingsChanged(); @@ -574,22 +573,6 @@ public final class IndexService extends AbstractIndexComponent implements IndexC logger.warn("[{}] failed to notify shard about setting change", e, shard.shardId().id()); } } - try { - indexStore.onRefreshSettings(settings); - } catch (Exception e) { - logger.warn("failed to refresh index store settings", e); - } - try { - slowLog.onRefreshSettings(settings); // this will be refactored soon anyway so duplication is ok here - } catch (Exception e) { - logger.warn("failed to refresh slowlog settings", e); - } - - try { - searchSlowLog.onRefreshSettings(settings); // this will be refactored soon anyway so duplication is ok here - } catch (Exception e) { - logger.warn("failed to refresh slowlog settings", e); - } if (refreshTask.getInterval().equals(indexSettings.getRefreshInterval()) == false) { rescheduleRefreshTasks(); } @@ -652,7 +635,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC case STARTED: case RELOCATED: try { - shard.refresh("schedule"); + if (shard.isRefreshNeeded()) { + shard.refresh("schedule"); + } } catch (EngineClosedException | AlreadyClosedException ex) { // fine - continue; } @@ -687,10 +672,12 @@ public final class IndexService extends AbstractIndexComponent implements IndexC private synchronized void onTaskCompletion() { if (mustReschedule()) { - indexService.logger.debug("scheduling {} every {}", toString(), interval); + if (indexService.logger.isTraceEnabled()) { + indexService.logger.trace("scheduling {} every {}", toString(), interval); + } this.scheduledFuture = threadPool.schedule(interval, getThreadPool(), BaseAsyncTask.this); } else { - 
indexService.logger.debug("scheduled {} disabled", toString()); + indexService.logger.trace("scheduled {} disabled", toString()); this.scheduledFuture = null; } } @@ -715,8 +702,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC private static boolean sameException(Exception left, Exception right) { if (left.getClass() == right.getClass()) { - if ((left.getMessage() != null && left.getMessage().equals(right.getMessage())) - || left.getMessage() == right.getMessage()) { + if (Objects.equals(left.getMessage(), right.getMessage())) { StackTraceElement[] stackTraceLeft = left.getStackTrace(); StackTraceElement[] stackTraceRight = right.getStackTrace(); if (stackTraceLeft.length == stackTraceRight.length) { diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index 04c45c24a91..17fe4ef499d 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -32,45 +34,65 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.translog.Translog; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; import java.util.Locale; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import 
java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Predicate; /** * This class encapsulates all index level settings and handles settings updates. * It's created per index and available to all index level classes and allows them to retrieve * the latest updated settings instance. Classes that need to listen to settings updates can register - * a settings consumer at index creation via {@link IndexModule#addIndexSettingsListener(Consumer)} that will + * a settings consumer at index creation via {@link IndexModule#addSettingsUpdateConsumer(Setting, Consumer)} that will * be called for each settings update. */ public final class IndexSettings { - public static final String DEFAULT_FIELD = "index.query.default_field"; - public static final String QUERY_STRING_LENIENT = "index.query_string.lenient"; - public static final String QUERY_STRING_ANALYZE_WILDCARD = "indices.query.query_string.analyze_wildcard"; - public static final String QUERY_STRING_ALLOW_LEADING_WILDCARD = "indices.query.query_string.allowLeadingWildcard"; - public static final String ALLOW_UNMAPPED = "index.query.parse.allow_unmapped_fields"; - public static final String INDEX_TRANSLOG_SYNC_INTERVAL = "index.translog.sync_interval"; - public static final String INDEX_TRANSLOG_DURABILITY = "index.translog.durability"; - public static final String INDEX_REFRESH_INTERVAL = "index.refresh_interval"; + public static final Setting DEFAULT_FIELD_SETTING = new Setting<>("index.query.default_field", AllFieldMapper.NAME, Function.identity(), false, Setting.Scope.INDEX); + public static final Setting QUERY_STRING_LENIENT_SETTING = Setting.boolSetting("index.query_string.lenient", false, false, Setting.Scope.INDEX); + public static final Setting QUERY_STRING_ANALYZE_WILDCARD = Setting.boolSetting("indices.query.query_string.analyze_wildcard", false, false, Setting.Scope.CLUSTER); + public static final Setting QUERY_STRING_ALLOW_LEADING_WILDCARD = 
Setting.boolSetting("indices.query.query_string.allowLeadingWildcard", true, false, Setting.Scope.CLUSTER); + public static final Setting ALLOW_UNMAPPED = Setting.boolSetting("index.query.parse.allow_unmapped_fields", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_TRANSLOG_SYNC_INTERVAL_SETTING = Setting.timeSetting("index.translog.sync_interval", TimeValue.timeValueSeconds(5), false, Setting.Scope.INDEX); + public static final Setting INDEX_TRANSLOG_DURABILITY_SETTING = new Setting<>("index.translog.durability", Translog.Durability.REQUEST.name(), (value) -> Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)), true, Setting.Scope.INDEX); + public static final Setting INDEX_WARMER_ENABLED_SETTING = Setting.boolSetting("index.warmer.enabled", true, true, Setting.Scope.INDEX); + public static final Setting INDEX_TTL_DISABLE_PURGE_SETTING = Setting.boolSetting("index.ttl.disable_purge", false, true, Setting.Scope.INDEX); + public static final Setting INDEX_CHECK_ON_STARTUP = new Setting<>("index.shard.check_on_startup", "false", (s) -> { + switch(s) { + case "false": + case "true": + case "fix": + case "checksum": + return s; + default: + throw new IllegalArgumentException("unknown value for [index.shard.check_on_startup] must be one of [true, false, fix, checksum] but was: " + s); + } + }, false, Setting.Scope.INDEX); + + /** + * Index setting describing the maximum value of from + size on a query. + * The Default maximum value of from + size on a query is 10,000. This was chosen as + * a conservative default as it is sure to not cause trouble. Users can + * certainly profile their cluster and decide to set it to 100,000 + * safely. 1,000,000 is probably way to high for any cluster to set + * safely. 
+ */ + public static final Setting MAX_RESULT_WINDOW_SETTING = Setting.intSetting("index.max_result_window", 10000, 1, true, Setting.Scope.INDEX); public static final TimeValue DEFAULT_REFRESH_INTERVAL = new TimeValue(1, TimeUnit.SECONDS); - public static final String INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE = "index.translog.flush_threshold_size"; - public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); + public static final Setting INDEX_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.refresh_interval", DEFAULT_REFRESH_INTERVAL, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); + public static final Setting INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING = Setting.byteSizeSetting("index.translog.flush_threshold_size", new ByteSizeValue(512, ByteSizeUnit.MB), true, Setting.Scope.INDEX); + /** * Index setting to enable / disable deletes garbage collection. * This setting is realtime updateable */ - public static final String INDEX_GC_DELETES_SETTING = "index.gc_deletes"; + public static final TimeValue DEFAULT_GC_DELETES = TimeValue.timeValueSeconds(60); + public static final Setting INDEX_GC_DELETES_SETTING = Setting.timeSetting("index.gc_deletes", DEFAULT_GC_DELETES, new TimeValue(-1, TimeUnit.MILLISECONDS), true, Setting.Scope.INDEX); private final String uuid; - private final List> updateListeners; private final Index index; private final Version version; private final ESLogger logger; @@ -94,9 +116,11 @@ public final class IndexSettings { private volatile ByteSizeValue flushThresholdSize; private final MergeSchedulerConfig mergeSchedulerConfig; private final MergePolicyConfig mergePolicyConfig; - + private final IndexScopedSettings scopedSettings; private long gcDeletesInMillis = DEFAULT_GC_DELETES.millis(); - + private volatile boolean warmerEnabled; + private volatile int maxResultWindow; + private volatile boolean TTLPurgeDisabled; /** * Returns the default search field for this index. 
@@ -139,10 +163,9 @@ public final class IndexSettings { * * @param indexMetaData the index metadata this settings object is associated with * @param nodeSettings the nodes settings this index is allocated on. - * @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated */ - public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection> updateListeners) { - this(indexMetaData, nodeSettings, updateListeners, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex())); + public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings) { + this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } /** @@ -151,13 +174,12 @@ public final class IndexSettings { * * @param indexMetaData the index metadata this settings object is associated with * @param nodeSettings the nodes settings this index is allocated on. 
- * @param updateListeners a collection of listeners / consumers that should be notified if one or more settings are updated * @param indexNameMatcher a matcher that can resolve an expression to the index name or index alias */ - public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Collection> updateListeners, final Predicate indexNameMatcher) { + public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Predicate indexNameMatcher, IndexScopedSettings indexScopedSettings) { + scopedSettings = indexScopedSettings.copy(nodeSettings, indexMetaData); this.nodeSettings = nodeSettings; this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build(); - this.updateListeners = Collections.unmodifiableList( new ArrayList<>(updateListeners)); this.index = new Index(indexMetaData.getIndex()); version = Version.indexCreated(settings); uuid = settings.get(IndexMetaData.SETTING_INDEX_UUID, IndexMetaData.INDEX_UUID_NA_VALUE); @@ -167,32 +189,55 @@ public final class IndexSettings { numberOfShards = settings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_SHARDS, null); isShadowReplicaIndex = IndexMetaData.isIndexUsingShadowReplicas(settings); - this.defaultField = settings.get(DEFAULT_FIELD, AllFieldMapper.NAME); - this.queryStringLenient = settings.getAsBoolean(QUERY_STRING_LENIENT, false); - this.queryStringAnalyzeWildcard = settings.getAsBoolean(QUERY_STRING_ANALYZE_WILDCARD, false); - this.queryStringAllowLeadingWildcard = settings.getAsBoolean(QUERY_STRING_ALLOW_LEADING_WILDCARD, true); + this.defaultField = DEFAULT_FIELD_SETTING.get(settings); + this.queryStringLenient = QUERY_STRING_LENIENT_SETTING.get(settings); + this.queryStringAnalyzeWildcard = QUERY_STRING_ANALYZE_WILDCARD.get(nodeSettings); + this.queryStringAllowLeadingWildcard = QUERY_STRING_ALLOW_LEADING_WILDCARD.get(nodeSettings); this.parseFieldMatcher = new ParseFieldMatcher(settings); - this.defaultAllowUnmappedFields 
= settings.getAsBoolean(ALLOW_UNMAPPED, true); + this.defaultAllowUnmappedFields = scopedSettings.get(ALLOW_UNMAPPED); this.indexNameMatcher = indexNameMatcher; - final String value = settings.get(INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST.name()); - this.durability = getFromSettings(settings, Translog.Durability.REQUEST); - syncInterval = settings.getAsTime(INDEX_TRANSLOG_SYNC_INTERVAL, TimeValue.timeValueSeconds(5)); - refreshInterval = settings.getAsTime(INDEX_REFRESH_INTERVAL, DEFAULT_REFRESH_INTERVAL); - flushThresholdSize = settings.getAsBytesSize(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)); - mergeSchedulerConfig = new MergeSchedulerConfig(settings); - gcDeletesInMillis = settings.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING, DEFAULT_GC_DELETES).getMillis(); - this.mergePolicyConfig = new MergePolicyConfig(logger, settings); + this.durability = scopedSettings.get(INDEX_TRANSLOG_DURABILITY_SETTING); + syncInterval = INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.get(settings); + refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING); + flushThresholdSize = scopedSettings.get(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING); + mergeSchedulerConfig = new MergeSchedulerConfig(this); + gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); + warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); + maxResultWindow = scopedSettings.get(MAX_RESULT_WINDOW_SETTING); + TTLPurgeDisabled = scopedSettings.get(INDEX_TTL_DISABLE_PURGE_SETTING); + this.mergePolicyConfig = new MergePolicyConfig(logger, this); assert indexNameMatcher.test(indexMetaData.getIndex()); + + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, mergePolicyConfig::setNoCFSRatio); + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, mergePolicyConfig::setExpungeDeletesAllowed); + 
scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING, mergePolicyConfig::setFloorSegmentSetting); + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING, mergePolicyConfig::setMaxMergesAtOnce); + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING, mergePolicyConfig::setMaxMergesAtOnceExplicit); + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, mergePolicyConfig::setMaxMergedSegment); + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, mergePolicyConfig::setSegmentsPerTier); + scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, mergePolicyConfig::setReclaimDeletesWeight); + scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, mergeSchedulerConfig::setMaxThreadCount); + scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, mergeSchedulerConfig::setMaxMergeCount); + scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.AUTO_THROTTLE_SETTING, mergeSchedulerConfig::setAutoThrottle); + scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_DURABILITY_SETTING, this::setTranslogDurability); + scopedSettings.addSettingsUpdateConsumer(INDEX_TTL_DISABLE_PURGE_SETTING, this::setTTLPurgeDisabled); + scopedSettings.addSettingsUpdateConsumer(MAX_RESULT_WINDOW_SETTING, this::setMaxResultWindow); + scopedSettings.addSettingsUpdateConsumer(INDEX_WARMER_ENABLED_SETTING, this::setEnableWarmer); + scopedSettings.addSettingsUpdateConsumer(INDEX_GC_DELETES_SETTING, this::setGCDeletes); + scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING, this::setTranslogFlushThresholdSize); + scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, 
this::setRefreshInterval); } + private void setTranslogFlushThresholdSize(ByteSizeValue byteSizeValue) { + this.flushThresholdSize = byteSizeValue; + } - /** - * Creates a new {@link IndexSettings} instance adding the given listeners to the settings - */ - IndexSettings newWithListener(final Collection> updateListeners) { - ArrayList> newUpdateListeners = new ArrayList<>(updateListeners); - newUpdateListeners.addAll(this.updateListeners); - return new IndexSettings(indexMetaData, nodeSettings, newUpdateListeners, indexNameMatcher); + private void setGCDeletes(TimeValue timeValue) { + this.gcDeletesInMillis = timeValue.getMillis(); + } + + private void setRefreshInterval(TimeValue timeValue) { + this.refreshInterval = timeValue; } /** @@ -321,33 +366,15 @@ public final class IndexSettings { } this.indexMetaData = indexMetaData; final Settings existingSettings = this.settings; - if (existingSettings.getByPrefix(IndexMetaData.INDEX_SETTING_PREFIX).getAsMap().equals(newSettings.getByPrefix(IndexMetaData.INDEX_SETTING_PREFIX).getAsMap())) { + if (existingSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).getAsMap().equals(newSettings.filter(IndexScopedSettings.INDEX_SETTINGS_KEY_PREDICATE).getAsMap())) { // nothing to update, same settings return false; } - final Settings mergedSettings = this.settings = Settings.builder().put(nodeSettings).put(newSettings).build(); - for (final Consumer consumer : updateListeners) { - try { - consumer.accept(mergedSettings); - } catch (Exception e) { - logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); - } - } - try { - updateSettings(mergedSettings); - } catch (Exception e) { - logger.warn("failed to refresh index settings for [{}]", e, mergedSettings); - } + scopedSettings.applySettings(newSettings); + this.settings = Settings.builder().put(nodeSettings).put(newSettings).build(); return true; } - /** - * Returns all settings update consumers - */ - List> getUpdateListeners() { // for testing - 
return updateListeners; - } - /** * Returns the translog durability for this index. */ @@ -355,60 +382,19 @@ public final class IndexSettings { return durability; } - private Translog.Durability getFromSettings(Settings settings, Translog.Durability defaultValue) { - final String value = settings.get(INDEX_TRANSLOG_DURABILITY, defaultValue.name()); - try { - return Translog.Durability.valueOf(value.toUpperCase(Locale.ROOT)); - } catch (IllegalArgumentException ex) { - logger.warn("Can't apply {} illegal value: {} using {} instead, use one of: {}", INDEX_TRANSLOG_DURABILITY, value, defaultValue, Arrays.toString(Translog.Durability.values())); - return defaultValue; - } + private void setTranslogDurability(Translog.Durability durability) { + this.durability = durability; } - private void updateSettings(Settings settings) { - final Translog.Durability durability = getFromSettings(settings, this.durability); - if (durability != this.durability) { - logger.info("updating durability from [{}] to [{}]", this.durability, durability); - this.durability = durability; - } + /** + * Returns true if index warmers are enabled, otherwise false + */ + public boolean isWarmerEnabled() { + return warmerEnabled; + } - TimeValue refreshInterval = settings.getAsTime(IndexSettings.INDEX_REFRESH_INTERVAL, this.refreshInterval); - if (!refreshInterval.equals(this.refreshInterval)) { - logger.info("updating refresh_interval from [{}] to [{}]", this.refreshInterval, refreshInterval); - this.refreshInterval = refreshInterval; - } - - ByteSizeValue flushThresholdSize = settings.getAsBytesSize(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, this.flushThresholdSize); - if (!flushThresholdSize.equals(this.flushThresholdSize)) { - logger.info("updating flush_threshold_size from [{}] to [{}]", this.flushThresholdSize, flushThresholdSize); - this.flushThresholdSize = flushThresholdSize; - } - - final int maxThreadCount = settings.getAsInt(MergeSchedulerConfig.MAX_THREAD_COUNT, 
mergeSchedulerConfig.getMaxThreadCount()); - if (maxThreadCount != mergeSchedulerConfig.getMaxThreadCount()) { - logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.MAX_THREAD_COUNT, mergeSchedulerConfig.getMaxMergeCount(), maxThreadCount); - mergeSchedulerConfig.setMaxThreadCount(maxThreadCount); - } - - final int maxMergeCount = settings.getAsInt(MergeSchedulerConfig.MAX_MERGE_COUNT, mergeSchedulerConfig.getMaxMergeCount()); - if (maxMergeCount != mergeSchedulerConfig.getMaxMergeCount()) { - logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.MAX_MERGE_COUNT, mergeSchedulerConfig.getMaxMergeCount(), maxMergeCount); - mergeSchedulerConfig.setMaxMergeCount(maxMergeCount); - } - - final boolean autoThrottle = settings.getAsBoolean(MergeSchedulerConfig.AUTO_THROTTLE, mergeSchedulerConfig.isAutoThrottle()); - if (autoThrottle != mergeSchedulerConfig.isAutoThrottle()) { - logger.info("updating [{}] from [{}] to [{}]", MergeSchedulerConfig.AUTO_THROTTLE, mergeSchedulerConfig.isAutoThrottle(), autoThrottle); - mergeSchedulerConfig.setAutoThrottle(autoThrottle); - } - - long gcDeletesInMillis = settings.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING, TimeValue.timeValueMillis(this.gcDeletesInMillis)).getMillis(); - if (gcDeletesInMillis != this.gcDeletesInMillis) { - logger.info("updating {} from [{}] to [{}]", IndexSettings.INDEX_GC_DELETES_SETTING, TimeValue.timeValueMillis(this.gcDeletesInMillis), TimeValue.timeValueMillis(gcDeletesInMillis)); - this.gcDeletesInMillis = gcDeletesInMillis; - } - - mergePolicyConfig.onRefreshSettings(settings); + private void setEnableWarmer(boolean enableWarmer) { + this.warmerEnabled = enableWarmer; } /** @@ -436,6 +422,18 @@ public final class IndexSettings { */ public MergeSchedulerConfig getMergeSchedulerConfig() { return mergeSchedulerConfig; } + /** + * Returns the max result window for search requests, describing the maximum value of from + size on a query. 
+ */ + public int getMaxResultWindow() { + return this.maxResultWindow; + } + + private void setMaxResultWindow(int maxResultWindow) { + this.maxResultWindow = maxResultWindow; + } + + /** * Returns the GC deletes cycle in milliseconds. */ @@ -450,4 +448,22 @@ public final class IndexSettings { return mergePolicyConfig.getMergePolicy(); } + /** + * Returns true if the TTL purge is disabled for this index. Default is false + */ + public boolean isTTLPurgeDisabled() { + return TTLPurgeDisabled; + } + + private void setTTLPurgeDisabled(boolean ttlPurgeDisabled) { + this.TTLPurgeDisabled = ttlPurgeDisabled; + } + + + public T getValue(Setting setting) { + return scopedSettings.get(setting); + } + + + IndexScopedSettings getScopedSettings() { return scopedSettings;} } diff --git a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 5cd3685b2f8..5452daa7f07 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/core/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.engine.Engine; @@ -31,15 +31,12 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.shard.IndexingOperationListener; import java.io.IOException; -import java.util.Locale; import java.util.concurrent.TimeUnit; /** */ public final class IndexingSlowLog implements IndexingOperationListener { - private boolean reformat; - private long indexWarnThreshold; private long indexInfoThreshold; private long indexDebugThreshold; @@ -51,21 
+48,33 @@ public final class IndexingSlowLog implements IndexingOperationListener { */ private int maxSourceCharsToLog; - private String level; + private SlowLogLevel level; private final ESLogger indexLogger; private final ESLogger deleteLogger; private static final String INDEX_INDEXING_SLOWLOG_PREFIX = "index.indexing.slowlog"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug"; - public static final String INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE = INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace"; - public static final String INDEX_INDEXING_SLOWLOG_REFORMAT = INDEX_INDEXING_SLOWLOG_PREFIX +".reformat"; - public static final String INDEX_INDEXING_SLOWLOG_LEVEL = INDEX_INDEXING_SLOWLOG_PREFIX +".level"; - public static final String INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG = INDEX_INDEXING_SLOWLOG_PREFIX + ".source"; + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting 
INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING = Setting.timeSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".threshold.index.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING = Setting.boolSetting(INDEX_INDEXING_SLOWLOG_PREFIX +".reformat", true, true, Setting.Scope.INDEX); + public static final Setting INDEX_INDEXING_SLOWLOG_LEVEL_SETTING = new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX +".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, Setting.Scope.INDEX); + /** + * Reads how much of the source to log. The user can specify any value they + * like and numbers are interpreted the maximum number of characters to log + * and everything else is interpreted as Elasticsearch interprets booleans + * which is then converted to 0 for false and Integer.MAX_VALUE for true. + */ + public static final Setting INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING = new Setting<>(INDEX_INDEXING_SLOWLOG_PREFIX + ".source", "1000", (value) -> { + try { + return Integer.parseInt(value, 10); + } catch (NumberFormatException e) { + return Booleans.parseBoolean(value, true) ? Integer.MAX_VALUE : 0; + } + }, true, Setting.Scope.INDEX); - IndexingSlowLog(Settings indexSettings) { + IndexingSlowLog(IndexSettings indexSettings) { this(indexSettings, Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"), Loggers.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".delete")); } @@ -73,77 +82,62 @@ public final class IndexingSlowLog implements IndexingOperationListener { /** * Build with the specified loggers. Only used to testing. 
*/ - IndexingSlowLog(Settings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) { + IndexingSlowLog(IndexSettings indexSettings, ESLogger indexLogger, ESLogger deleteLogger) { this.indexLogger = indexLogger; this.deleteLogger = deleteLogger; - this.reformat = indexSettings.getAsBoolean(INDEX_INDEXING_SLOWLOG_REFORMAT, true); - this.indexWarnThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, TimeValue.timeValueNanos(-1)).nanos(); - this.indexInfoThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, TimeValue.timeValueNanos(-1)).nanos(); - this.indexDebugThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, TimeValue.timeValueNanos(-1)).nanos(); - this.indexTraceThreshold = indexSettings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, TimeValue.timeValueNanos(-1)).nanos(); - this.level = indexSettings.get(INDEX_INDEXING_SLOWLOG_LEVEL, "TRACE").toUpperCase(Locale.ROOT); - this.maxSourceCharsToLog = readSourceToLog(indexSettings); - - indexLogger.setLevel(level); - deleteLogger.setLevel(level); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING, this::setReformat); + this.reformat = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING, this::setWarnThreshold); + this.indexWarnThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING, this::setInfoThreshold); + this.indexInfoThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING, this::setDebugThreshold); + this.indexDebugThreshold = 
indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING, this::setTraceThreshold); + this.indexTraceThreshold = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_LEVEL_SETTING, this::setLevel); + setLevel(indexSettings.getValue(INDEX_INDEXING_SLOWLOG_LEVEL_SETTING)); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING, this::setMaxSourceCharsToLog); + this.maxSourceCharsToLog = indexSettings.getValue(INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG_SETTING); } - synchronized void onRefreshSettings(Settings settings) { - long indexWarnThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN, TimeValue.timeValueNanos(this.indexWarnThreshold)).nanos(); - if (indexWarnThreshold != this.indexWarnThreshold) { - this.indexWarnThreshold = indexWarnThreshold; - } - long indexInfoThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO, TimeValue.timeValueNanos(this.indexInfoThreshold)).nanos(); - if (indexInfoThreshold != this.indexInfoThreshold) { - this.indexInfoThreshold = indexInfoThreshold; - } - long indexDebugThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG, TimeValue.timeValueNanos(this.indexDebugThreshold)).nanos(); - if (indexDebugThreshold != this.indexDebugThreshold) { - this.indexDebugThreshold = indexDebugThreshold; - } - long indexTraceThreshold = settings.getAsTime(INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE, TimeValue.timeValueNanos(this.indexTraceThreshold)).nanos(); - if (indexTraceThreshold != this.indexTraceThreshold) { - this.indexTraceThreshold = indexTraceThreshold; - } - - String level = settings.get(INDEX_INDEXING_SLOWLOG_LEVEL, this.level); - if 
(!level.equals(this.level)) { - this.indexLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.deleteLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.level = level; - } - - boolean reformat = settings.getAsBoolean(INDEX_INDEXING_SLOWLOG_REFORMAT, this.reformat); - if (reformat != this.reformat) { - this.reformat = reformat; - } - - int maxSourceCharsToLog = readSourceToLog(settings); - if (maxSourceCharsToLog != this.maxSourceCharsToLog) { - this.maxSourceCharsToLog = maxSourceCharsToLog; - } + private void setMaxSourceCharsToLog(int maxSourceCharsToLog) { + this.maxSourceCharsToLog = maxSourceCharsToLog; } + private void setLevel(SlowLogLevel level) { + this.level = level; + this.indexLogger.setLevel(level.name()); + this.deleteLogger.setLevel(level.name()); + } + + private void setWarnThreshold(TimeValue warnThreshold) { + this.indexWarnThreshold = warnThreshold.nanos(); + } + + private void setInfoThreshold(TimeValue infoThreshold) { + this.indexInfoThreshold = infoThreshold.nanos(); + } + + private void setDebugThreshold(TimeValue debugThreshold) { + this.indexDebugThreshold = debugThreshold.nanos(); + } + + private void setTraceThreshold(TimeValue traceThreshold) { + this.indexTraceThreshold = traceThreshold.nanos(); + } + + private void setReformat(boolean reformat) { + this.reformat = reformat; + } + + public void postIndex(Engine.Index index) { final long took = index.endTime() - index.startTime(); postIndexing(index.parsedDoc(), took); } - /** - * Reads how much of the source to log. The user can specify any value they - * like and numbers are interpreted the maximum number of characters to log - * and everything else is interpreted as Elasticsearch interprets booleans - * which is then converted to 0 for false and Integer.MAX_VALUE for true. 
- */ - private int readSourceToLog(Settings settings) { - String sourceToLog = settings.get(INDEX_INDEXING_SLOWLOG_MAX_SOURCE_CHARS_TO_LOG, "1000"); - try { - return Integer.parseInt(sourceToLog, 10); - } catch (NumberFormatException e) { - return Booleans.parseBoolean(sourceToLog, true) ? Integer.MAX_VALUE : 0; - } - } private void postIndexing(ParsedDocument doc, long tookInNanos) { if (indexWarnThreshold >= 0 && tookInNanos > indexWarnThreshold) { @@ -194,4 +188,33 @@ public final class IndexingSlowLog implements IndexingOperationListener { return sb.toString(); } } + + boolean isReformat() { + return reformat; + } + + long getIndexWarnThreshold() { + return indexWarnThreshold; + } + + long getIndexInfoThreshold() { + return indexInfoThreshold; + } + + long getIndexTraceThreshold() { + return indexTraceThreshold; + } + + long getIndexDebugThreshold() { + return indexDebugThreshold; + } + + int getMaxSourceCharsToLog() { + return maxSourceCharsToLog; + } + + SlowLogLevel getLevel() { + return level; + } + } diff --git a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java index d3b90b131f5..fc9f30cf3fd 100644 --- a/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergePolicyConfig.java @@ -23,7 +23,8 @@ import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.TieredMergePolicy; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -117,7 +118,6 @@ public final class MergePolicyConfig { private final TieredMergePolicy mergePolicy = new TieredMergePolicy(); private final ESLogger logger; private 
final boolean mergesEnabled; - private volatile double noCFSRatio; public static final double DEFAULT_EXPUNGE_DELETES_ALLOWED = 10d; public static final ByteSizeValue DEFAULT_FLOOR_SEGMENT = new ByteSizeValue(2, ByteSizeUnit.MB); @@ -126,35 +126,35 @@ public final class MergePolicyConfig { public static final ByteSizeValue DEFAULT_MAX_MERGED_SEGMENT = new ByteSizeValue(5, ByteSizeUnit.GB); public static final double DEFAULT_SEGMENTS_PER_TIER = 10.0d; public static final double DEFAULT_RECLAIM_DELETES_WEIGHT = 2.0d; - public static final String INDEX_COMPOUND_FORMAT = "index.compound_format"; + public static final Setting INDEX_COMPOUND_FORMAT_SETTING = new Setting<>("index.compound_format", Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO), MergePolicyConfig::parseNoCFSRatio, true, Setting.Scope.INDEX); - public static final String INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED = "index.merge.policy.expunge_deletes_allowed"; - public static final String INDEX_MERGE_POLICY_FLOOR_SEGMENT = "index.merge.policy.floor_segment"; - public static final String INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE = "index.merge.policy.max_merge_at_once"; - public static final String INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT = "index.merge.policy.max_merge_at_once_explicit"; - public static final String INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT = "index.merge.policy.max_merged_segment"; - public static final String INDEX_MERGE_POLICY_SEGMENTS_PER_TIER = "index.merge.policy.segments_per_tier"; - public static final String INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT = "index.merge.policy.reclaim_deletes_weight"; - public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; + public static final Setting INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING = Setting.doubleSetting("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING = 
Setting.byteSizeSetting("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE, 2, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING = Setting.intSetting("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT, 2, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING = Setting.byteSizeSetting("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING = Setting.doubleSetting("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER, 2.0d, true, Setting.Scope.INDEX); + public static final Setting INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING = Setting.doubleSetting("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT, 0.0d, true, Setting.Scope.INDEX); + public static final String INDEX_MERGE_ENABLED = "index.merge.enabled"; // don't convert to Setting<> and register... 
we only set this in tests and register via a plugin - MergePolicyConfig(ESLogger logger, Settings indexSettings) { + MergePolicyConfig(ESLogger logger, IndexSettings indexSettings) { this.logger = logger; - this.noCFSRatio = parseNoCFSRatio(indexSettings.get(INDEX_COMPOUND_FORMAT, Double.toString(TieredMergePolicy.DEFAULT_NO_CFS_RATIO))); - double forceMergeDeletesPctAllowed = indexSettings.getAsDouble("index.merge.policy.expunge_deletes_allowed", DEFAULT_EXPUNGE_DELETES_ALLOWED); // percentage - ByteSizeValue floorSegment = indexSettings.getAsBytesSize("index.merge.policy.floor_segment", DEFAULT_FLOOR_SEGMENT); - int maxMergeAtOnce = indexSettings.getAsInt("index.merge.policy.max_merge_at_once", DEFAULT_MAX_MERGE_AT_ONCE); - int maxMergeAtOnceExplicit = indexSettings.getAsInt("index.merge.policy.max_merge_at_once_explicit", DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); + IndexScopedSettings scopedSettings = indexSettings.getScopedSettings(); + double forceMergeDeletesPctAllowed = indexSettings.getValue(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING); // percentage + ByteSizeValue floorSegment = indexSettings.getValue(INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING); + int maxMergeAtOnce = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING); + int maxMergeAtOnceExplicit = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING); // TODO is this really a good default number for max_merge_segment, what happens for large indices, won't they end up with many segments? 
- ByteSizeValue maxMergedSegment = indexSettings.getAsBytesSize("index.merge.policy.max_merged_segment", DEFAULT_MAX_MERGED_SEGMENT); - double segmentsPerTier = indexSettings.getAsDouble("index.merge.policy.segments_per_tier", DEFAULT_SEGMENTS_PER_TIER); - double reclaimDeletesWeight = indexSettings.getAsDouble("index.merge.policy.reclaim_deletes_weight", DEFAULT_RECLAIM_DELETES_WEIGHT); - this.mergesEnabled = indexSettings.getAsBoolean(INDEX_MERGE_ENABLED, true); + ByteSizeValue maxMergedSegment = indexSettings.getValue(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING); + double segmentsPerTier = indexSettings.getValue(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING); + double reclaimDeletesWeight = indexSettings.getValue(INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING); + this.mergesEnabled = indexSettings.getSettings().getAsBoolean(INDEX_MERGE_ENABLED, true); if (mergesEnabled == false) { logger.warn("[{}] is set to false, this should only be used in tests and can cause serious problems in production environments", INDEX_MERGE_ENABLED); } maxMergeAtOnce = adjustMaxMergeAtOnceIfNeeded(maxMergeAtOnce, segmentsPerTier); - mergePolicy.setNoCFSRatio(noCFSRatio); + mergePolicy.setNoCFSRatio(indexSettings.getValue(INDEX_COMPOUND_FORMAT_SETTING)); mergePolicy.setForceMergeDeletesPctAllowed(forceMergeDeletesPctAllowed); mergePolicy.setFloorSegmentMB(floorSegment.mbFrac()); mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce); @@ -162,8 +162,42 @@ public final class MergePolicyConfig { mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac()); mergePolicy.setSegmentsPerTier(segmentsPerTier); mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); - logger.debug("using [tiered] merge mergePolicy with expunge_deletes_allowed[{}], floor_segment[{}], max_merge_at_once[{}], max_merge_at_once_explicit[{}], max_merged_segment[{}], segments_per_tier[{}], reclaim_deletes_weight[{}]", + if (logger.isTraceEnabled()) { + logger.trace("using [tiered] merge mergePolicy with 
expunge_deletes_allowed[{}], floor_segment[{}], max_merge_at_once[{}], max_merge_at_once_explicit[{}], max_merged_segment[{}], segments_per_tier[{}], reclaim_deletes_weight[{}]", forceMergeDeletesPctAllowed, floorSegment, maxMergeAtOnce, maxMergeAtOnceExplicit, maxMergedSegment, segmentsPerTier, reclaimDeletesWeight); + } + } + + void setReclaimDeletesWeight(Double reclaimDeletesWeight) { + mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); + } + + void setSegmentsPerTier(Double segmentsPerTier) { + mergePolicy.setSegmentsPerTier(segmentsPerTier); + } + + void setMaxMergedSegment(ByteSizeValue maxMergedSegment) { + mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac()); + } + + void setMaxMergesAtOnceExplicit(Integer maxMergeAtOnceExplicit) { + mergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit); + } + + void setMaxMergesAtOnce(Integer maxMergeAtOnce) { + mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce); + } + + void setFloorSegmentSetting(ByteSizeValue floorSegementSetting) { + mergePolicy.setFloorSegmentMB(floorSegementSetting.mbFrac()); + } + + void setExpungeDeletesAllowed(Double value) { + mergePolicy.setForceMergeDeletesPctAllowed(value); + } + + void setNoCFSRatio(Double noCFSRatio) { + mergePolicy.setNoCFSRatio(noCFSRatio); } private int adjustMaxMergeAtOnceIfNeeded(int maxMergeAtOnce, double segmentsPerTier) { @@ -184,65 +218,6 @@ public final class MergePolicyConfig { return mergesEnabled ? 
mergePolicy : NoMergePolicy.INSTANCE; } - void onRefreshSettings(Settings settings) { - final double oldExpungeDeletesPctAllowed = mergePolicy.getForceMergeDeletesPctAllowed(); - final double expungeDeletesPctAllowed = settings.getAsDouble(INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, oldExpungeDeletesPctAllowed); - if (expungeDeletesPctAllowed != oldExpungeDeletesPctAllowed) { - logger.info("updating [expunge_deletes_allowed] from [{}] to [{}]", oldExpungeDeletesPctAllowed, expungeDeletesPctAllowed); - mergePolicy.setForceMergeDeletesPctAllowed(expungeDeletesPctAllowed); - } - - final double oldFloorSegmentMB = mergePolicy.getFloorSegmentMB(); - final ByteSizeValue floorSegment = settings.getAsBytesSize(INDEX_MERGE_POLICY_FLOOR_SEGMENT, null); - if (floorSegment != null && floorSegment.mbFrac() != oldFloorSegmentMB) { - logger.info("updating [floor_segment] from [{}mb] to [{}]", oldFloorSegmentMB, floorSegment); - mergePolicy.setFloorSegmentMB(floorSegment.mbFrac()); - } - - final double oldSegmentsPerTier = mergePolicy.getSegmentsPerTier(); - final double segmentsPerTier = settings.getAsDouble(INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, oldSegmentsPerTier); - if (segmentsPerTier != oldSegmentsPerTier) { - logger.info("updating [segments_per_tier] from [{}] to [{}]", oldSegmentsPerTier, segmentsPerTier); - mergePolicy.setSegmentsPerTier(segmentsPerTier); - } - - final int oldMaxMergeAtOnce = mergePolicy.getMaxMergeAtOnce(); - int maxMergeAtOnce = settings.getAsInt(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, oldMaxMergeAtOnce); - if (maxMergeAtOnce != oldMaxMergeAtOnce) { - logger.info("updating [max_merge_at_once] from [{}] to [{}]", oldMaxMergeAtOnce, maxMergeAtOnce); - maxMergeAtOnce = adjustMaxMergeAtOnceIfNeeded(maxMergeAtOnce, segmentsPerTier); - mergePolicy.setMaxMergeAtOnce(maxMergeAtOnce); - } - - final int oldMaxMergeAtOnceExplicit = mergePolicy.getMaxMergeAtOnceExplicit(); - final int maxMergeAtOnceExplicit = 
settings.getAsInt(INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, oldMaxMergeAtOnceExplicit); - if (maxMergeAtOnceExplicit != oldMaxMergeAtOnceExplicit) { - logger.info("updating [max_merge_at_once_explicit] from [{}] to [{}]", oldMaxMergeAtOnceExplicit, maxMergeAtOnceExplicit); - mergePolicy.setMaxMergeAtOnceExplicit(maxMergeAtOnceExplicit); - } - - final double oldMaxMergedSegmentMB = mergePolicy.getMaxMergedSegmentMB(); - final ByteSizeValue maxMergedSegment = settings.getAsBytesSize(INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, null); - if (maxMergedSegment != null && maxMergedSegment.mbFrac() != oldMaxMergedSegmentMB) { - logger.info("updating [max_merged_segment] from [{}mb] to [{}]", oldMaxMergedSegmentMB, maxMergedSegment); - mergePolicy.setMaxMergedSegmentMB(maxMergedSegment.mbFrac()); - } - - final double oldReclaimDeletesWeight = mergePolicy.getReclaimDeletesWeight(); - final double reclaimDeletesWeight = settings.getAsDouble(INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, oldReclaimDeletesWeight); - if (reclaimDeletesWeight != oldReclaimDeletesWeight) { - logger.info("updating [reclaim_deletes_weight] from [{}] to [{}]", oldReclaimDeletesWeight, reclaimDeletesWeight); - mergePolicy.setReclaimDeletesWeight(reclaimDeletesWeight); - } - - double noCFSRatio = parseNoCFSRatio(settings.get(INDEX_COMPOUND_FORMAT, Double.toString(MergePolicyConfig.this.noCFSRatio))); - if (noCFSRatio != MergePolicyConfig.this.noCFSRatio) { - logger.info("updating index.compound_format from [{}] to [{}]", formatNoCFSRatio(MergePolicyConfig.this.noCFSRatio), formatNoCFSRatio(noCFSRatio)); - mergePolicy.setNoCFSRatio(noCFSRatio); - MergePolicyConfig.this.noCFSRatio = noCFSRatio; - } - } - private static double parseNoCFSRatio(String noCFSRatio) { noCFSRatio = noCFSRatio.trim(); if (noCFSRatio.equalsIgnoreCase("true")) { @@ -261,14 +236,4 @@ public final class MergePolicyConfig { } } } - - private static String formatNoCFSRatio(double ratio) { - if (ratio == 1.0) { - return 
Boolean.TRUE.toString(); - } else if (ratio == 0.0) { - return Boolean.FALSE.toString(); - } else { - return Double.toString(ratio); - } - } } diff --git a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java index ad6e2ec5d17..0d212a4eb30 100644 --- a/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java +++ b/core/src/main/java/org/elasticsearch/index/MergeSchedulerConfig.java @@ -20,9 +20,8 @@ package org.elasticsearch.index; import org.apache.lucene.index.ConcurrentMergeScheduler; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.index.IndexSettings; /** * The merge scheduler (ConcurrentMergeScheduler) controls the execution of @@ -52,18 +51,18 @@ import org.elasticsearch.index.IndexSettings; */ public final class MergeSchedulerConfig { - public static final String MAX_THREAD_COUNT = "index.merge.scheduler.max_thread_count"; - public static final String MAX_MERGE_COUNT = "index.merge.scheduler.max_merge_count"; - public static final String AUTO_THROTTLE = "index.merge.scheduler.auto_throttle"; + public static final Setting MAX_THREAD_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_thread_count", (s) -> Integer.toString(Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(s) / 2))), (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_thread_count"), true, Setting.Scope.INDEX); + public static final Setting MAX_MERGE_COUNT_SETTING = new Setting<>("index.merge.scheduler.max_merge_count", (s) -> Integer.toString(MAX_THREAD_COUNT_SETTING.get(s) + 5), (s) -> Setting.parseInt(s, 1, "index.merge.scheduler.max_merge_count"), true, Setting.Scope.INDEX); + public static final Setting AUTO_THROTTLE_SETTING = Setting.boolSetting("index.merge.scheduler.auto_throttle", true, true, Setting.Scope.INDEX); private 
volatile boolean autoThrottle; private volatile int maxThreadCount; private volatile int maxMergeCount; - MergeSchedulerConfig(Settings settings) { - maxThreadCount = settings.getAsInt(MAX_THREAD_COUNT, Math.max(1, Math.min(4, EsExecutors.boundedNumberOfProcessors(settings) / 2))); - maxMergeCount = settings.getAsInt(MAX_MERGE_COUNT, maxThreadCount + 5); - this.autoThrottle = settings.getAsBoolean(AUTO_THROTTLE, true); + MergeSchedulerConfig(IndexSettings indexSettings) { + maxThreadCount = indexSettings.getValue(MAX_THREAD_COUNT_SETTING); + maxMergeCount = indexSettings.getValue(MAX_MERGE_COUNT_SETTING); + this.autoThrottle = indexSettings.getValue(AUTO_THROTTLE_SETTING); } /** diff --git a/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java new file mode 100644 index 00000000000..200aa1cc29a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -0,0 +1,238 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.internal.SearchContext; + +import java.util.concurrent.TimeUnit; + +/** + */ +public final class SearchSlowLog { + + private boolean reformat; + + private long queryWarnThreshold; + private long queryInfoThreshold; + private long queryDebugThreshold; + private long queryTraceThreshold; + + private long fetchWarnThreshold; + private long fetchInfoThreshold; + private long fetchDebugThreshold; + private long fetchTraceThreshold; + + private SlowLogLevel level; + + private final ESLogger queryLogger; + private final ESLogger fetchLogger; + + private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting 
INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING = Setting.timeSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.trace", TimeValue.timeValueNanos(-1), TimeValue.timeValueMillis(-1), true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_REFORMAT = Setting.boolSetting(INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat", true, true, Setting.Scope.INDEX); + public static final Setting INDEX_SEARCH_SLOWLOG_LEVEL = new Setting<>(INDEX_SEARCH_SLOWLOG_PREFIX + ".level", SlowLogLevel.TRACE.name(), SlowLogLevel::parse, true, Setting.Scope.INDEX); + + public SearchSlowLog(IndexSettings indexSettings) { + + this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); + this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_REFORMAT, this::setReformat); + this.reformat = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_REFORMAT); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING, this::setQueryWarnThreshold); + this.queryWarnThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING).nanos(); + 
indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING, this::setQueryInfoThreshold); + this.queryInfoThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING, this::setQueryDebugThreshold); + this.queryDebugThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING, this::setQueryTraceThreshold); + this.queryTraceThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING).nanos(); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING, this::setFetchWarnThreshold); + this.fetchWarnThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING, this::setFetchInfoThreshold); + this.fetchInfoThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING, this::setFetchDebugThreshold); + this.fetchDebugThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING).nanos(); + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING, this::setFetchTraceThreshold); + this.fetchTraceThreshold = indexSettings.getValue(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING).nanos(); + + indexSettings.getScopedSettings().addSettingsUpdateConsumer(INDEX_SEARCH_SLOWLOG_LEVEL, this::setLevel); + setLevel(indexSettings.getValue(INDEX_SEARCH_SLOWLOG_LEVEL)); + } + + private void 
setLevel(SlowLogLevel level) { + this.level = level; + this.queryLogger.setLevel(level.name()); + this.fetchLogger.setLevel(level.name()); + } + + public void onQueryPhase(SearchContext context, long tookInNanos) { + if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) { + queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) { + queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) { + queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) { + queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } + } + + public void onFetchPhase(SearchContext context, long tookInNanos) { + if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) { + fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) { + fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) { + fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) { + fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); + } + } + + private static class SlowLogSearchContextPrinter { + private final SearchContext context; + private final long tookInNanos; + private final boolean reformat; + + public SlowLogSearchContextPrinter(SearchContext context, long tookInNanos, boolean reformat) { + this.context = context; + this.tookInNanos = tookInNanos; + this.reformat = reformat; + } + + 
@Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); + if (context.types() == null) { + sb.append("types[], "); + } else { + sb.append("types["); + Strings.arrayToDelimitedString(context.types(), ",", sb); + sb.append("], "); + } + if (context.groupStats() == null) { + sb.append("stats[], "); + } else { + sb.append("stats["); + Strings.collectionToDelimitedString(context.groupStats(), ",", "", "", sb); + sb.append("], "); + } + sb.append("search_type[").append(context.searchType()).append("], total_shards[").append(context.numberOfShards()).append("], "); + if (context.request().source() != null) { + sb.append("source[").append(context.request().source()).append("], "); + } else { + sb.append("source[], "); + } + return sb.toString(); + } + } + + private void setReformat(boolean reformat) { + this.reformat = reformat; + } + + private void setQueryWarnThreshold(TimeValue warnThreshold) { + this.queryWarnThreshold = warnThreshold.nanos(); + } + + private void setQueryInfoThreshold(TimeValue infoThreshold) { + this.queryInfoThreshold = infoThreshold.nanos(); + } + + private void setQueryDebugThreshold(TimeValue debugThreshold) { + this.queryDebugThreshold = debugThreshold.nanos(); + } + + private void setQueryTraceThreshold(TimeValue traceThreshold) { + this.queryTraceThreshold = traceThreshold.nanos(); + } + + private void setFetchWarnThreshold(TimeValue warnThreshold) { + this.fetchWarnThreshold = warnThreshold.nanos(); + } + + private void setFetchInfoThreshold(TimeValue infoThreshold) { + this.fetchInfoThreshold = infoThreshold.nanos(); + } + + private void setFetchDebugThreshold(TimeValue debugThreshold) { + this.fetchDebugThreshold = debugThreshold.nanos(); + } + + private void setFetchTraceThreshold(TimeValue traceThreshold) { + this.fetchTraceThreshold = 
traceThreshold.nanos(); + } + + boolean isReformat() { + return reformat; + } + + long getQueryWarnThreshold() { + return queryWarnThreshold; + } + + long getQueryInfoThreshold() { + return queryInfoThreshold; + } + + long getQueryDebugThreshold() { + return queryDebugThreshold; + } + + long getQueryTraceThreshold() { + return queryTraceThreshold; + } + + long getFetchWarnThreshold() { + return fetchWarnThreshold; + } + + long getFetchInfoThreshold() { + return fetchInfoThreshold; + } + + long getFetchDebugThreshold() { + return fetchDebugThreshold; + } + + long getFetchTraceThreshold() { + return fetchTraceThreshold; + } + + SlowLogLevel getLevel() { + return level; + } +} diff --git a/plugins/site-example/build.gradle b/core/src/main/java/org/elasticsearch/index/SlowLogLevel.java similarity index 77% rename from plugins/site-example/build.gradle rename to core/src/main/java/org/elasticsearch/index/SlowLogLevel.java index d2228129025..c5b65ebb8a9 100644 --- a/plugins/site-example/build.gradle +++ b/core/src/main/java/org/elasticsearch/index/SlowLogLevel.java @@ -16,12 +16,13 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.index; -esplugin { - description 'Demonstrates how to serve resources via elasticsearch.' 
- jvm false - site true +import java.util.Locale; + +public enum SlowLogLevel { + WARN, TRACE, INFO, DEBUG; + public static SlowLogLevel parse(String level) { + return valueOf(level.toUpperCase(Locale.ROOT)); + } } - -// no unit tests -test.enabled = false diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 1de139aa695..f18cc631aea 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -69,7 +70,7 @@ import java.util.concurrent.Executor; */ public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { - public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly"; + public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, false, Setting.Scope.INDEX); private final boolean loadRandomAccessFiltersEagerly; private final Cache> loadedFilters; @@ -82,14 +83,14 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L if (listener == null) { throw new IllegalArgumentException("listener must not be null"); } - this.loadRandomAccessFiltersEagerly = this.indexSettings.getSettings().getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true); + 
this.loadRandomAccessFiltersEagerly = this.indexSettings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); this.loadedFilters = CacheBuilder.>builder().removalListener(this).build(); this.warmer = new BitSetProducerWarmer(); this.indicesWarmer = indicesWarmer; indicesWarmer.addListener(warmer); this.listener = listener; } - + public BitSetProducer getBitSetProducer(Query query) { diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 7d68eecf8a3..221df42b055 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.similarities.Similarity; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -38,6 +39,8 @@ import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; +import java.util.Set; + /* * Holds all the configuration that is used to create an {@link Engine}. * Once {@link Engine} has been created with this object, changes to this @@ -64,16 +67,30 @@ public final class EngineConfig { private final QueryCache queryCache; private final QueryCachingPolicy queryCachingPolicy; + static { + + } /** * Index setting to change the low level lucene codec used for writing new segments. * This setting is not realtime updateable. 
*/ - public static final String INDEX_CODEC_SETTING = "index.codec"; + public static final Setting INDEX_CODEC_SETTING = new Setting<>("index.codec", "default", (s) -> { + switch(s) { + case "default": + case "best_compression": + case "lucene_default": + return s; + default: + if (Codec.availableCodecs().contains(s) == false) { // we don't error message the not officially supported ones + throw new IllegalArgumentException("unknown value for [index.codec] must be one of [default, best_compression] but was: " + s); + } + return s; + } + }, false, Setting.Scope.INDEX); /** if set to true the engine will start even if the translog id in the commit point can not be found */ public static final String INDEX_FORCE_NEW_TRANSLOG = "index.engine.force_new_translog"; - private static final String DEFAULT_CODEC_NAME = "default"; private TranslogConfig translogConfig; private boolean create = false; @@ -97,7 +114,7 @@ public final class EngineConfig { this.similarity = similarity; this.codecService = codecService; this.eventListener = eventListener; - codecName = settings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME); + codecName = indexSettings.getValue(INDEX_CODEC_SETTING); // We give IndexWriter a "huge" (256 MB) buffer, so it won't flush on its own unless the ES indexing buffer is also huge and/or // there are not too many shards allocated to this node. 
Instead, IndexingMemoryController periodically checks // and refreshes the most heap-consuming shards when total indexing heap usage across all shards is too high: diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 74cac49b76d..d9ee2f4177a 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -231,7 +231,8 @@ public class InternalEngine extends Engine { protected void recoverFromTranslog(EngineConfig engineConfig, Translog.TranslogGeneration translogGeneration) throws IOException { int opsRecovered = 0; final TranslogRecoveryPerformer handler = engineConfig.getTranslogRecoveryPerformer(); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { + try { + Translog.Snapshot snapshot = translog.newSnapshot(); Translog.Operation operation; while ((operation = snapshot.next()) != null) { try { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 8ac0bda2f0b..b898f3ffd2a 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.Accountable; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData; @@ -55,9 +56,17 @@ import static java.util.Collections.unmodifiableMap; /** */ public class IndexFieldDataService extends AbstractIndexComponent implements 
Closeable { - - public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache"; public static final String FIELDDATA_CACHE_VALUE_NODE = "node"; + public static final String FIELDDATA_CACHE_KEY = "index.fielddata.cache"; + public static final Setting INDEX_FIELDDATA_CACHE_KEY = new Setting<>(FIELDDATA_CACHE_KEY, (s) -> FIELDDATA_CACHE_VALUE_NODE, (s) -> { + switch (s) { + case "node": + case "none": + return s; + default: + throw new IllegalArgumentException("failed to parse [" + s + "] must be one of [node,none]"); + } + }, false, Setting.Scope.INDEX); private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> { throw new IllegalStateException("Can't load fielddata on [" + fieldType.name() @@ -228,7 +237,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo if (cache == null) { // we default to node level cache, which in turn defaults to be unbounded // this means changing the node level settings is simple, just set the bounds there - String cacheType = type.getSettings().get("cache", indexSettings.getSettings().get(FIELDDATA_CACHE_KEY, FIELDDATA_CACHE_VALUE_NODE)); + String cacheType = type.getSettings().get("cache", indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY)); if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) { cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type); } else if ("none".equals(cacheType)){ diff --git a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 32e21be0326..ef901263d0a 100644 --- a/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/core/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -19,7 +19,9 @@ package org.elasticsearch.index.get; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.Term; +import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; @@ -50,7 +52,10 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; import org.elasticsearch.search.fetch.source.FetchSourceContext; +import org.elasticsearch.search.internal.InternalSearchHitField; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -350,6 +355,14 @@ public final class ShardGetService extends AbstractIndexShardComponent { } } + if (docMapper.parentFieldMapper().active()) { + String parentId = ParentFieldSubFetchPhase.getParentId(docMapper.parentFieldMapper(), docIdAndVersion.context.reader(), docIdAndVersion.docId); + if (fields == null) { + fields = new HashMap<>(1); + } + fields.put(ParentFieldMapper.NAME, new GetField(ParentFieldMapper.NAME, Collections.singletonList(parentId))); + } + // now, go and do the script thingy if needed if (gFields != null && gFields.length > 0) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index c2d644d393d..0528541238a 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -79,10 +79,20 @@ public class DocumentMapper implements ToXContent { this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1)); this.rootObjectMapper = builder.build(builderContext); + final String type = rootObjectMapper.name(); + DocumentMapper existingMapper = 
mapperService.documentMapper(type); for (Map.Entry entry : mapperService.mapperRegistry.getMetadataMapperParsers().entrySet()) { final String name = entry.getKey(); - final TypeParser parser = entry.getValue(); - final MetadataFieldMapper metadataMapper = parser.getDefault(indexSettings, mapperService.fullName(name), builder.name()); + final MetadataFieldMapper existingMetadataMapper = existingMapper == null + ? null + : (MetadataFieldMapper) existingMapper.mappers().getMapper(name); + final MetadataFieldMapper metadataMapper; + if (existingMetadataMapper == null) { + final TypeParser parser = entry.getValue(); + metadataMapper = parser.getDefault(indexSettings, mapperService.fullName(name), builder.name()); + } else { + metadataMapper = existingMetadataMapper; + } metadataMappers.put(metadataMapper.getClass(), metadataMapper); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 23d8cd56034..d0d75709767 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -47,7 +48,8 @@ import java.util.Map; import java.util.stream.StreamSupport; public abstract class FieldMapper extends Mapper implements Cloneable { - + public static final Setting IGNORE_MALFORMED_SETTING = Setting.boolSetting("index.mapping.ignore_malformed", false, false, Setting.Scope.INDEX); + public static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", false, 
false, Setting.Scope.INDEX); public abstract static class Builder extends Mapper.Builder { protected final MappedFieldType fieldType; @@ -68,6 +70,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { this.fieldType = fieldType.clone(); this.defaultFieldType = defaultFieldType.clone(); this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable + this.docValuesSet = fieldType.hasDocValues(); multiFieldsBuilder = new MultiFields.Builder(); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 5f8049b55fb..09d459fc4a2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -389,7 +389,12 @@ public abstract class MappedFieldType extends FieldType { return false; } - /** Creates a term associated with the field of this mapper for the given value */ + /** + * Creates a term associated with the field of this mapper for the given + * value. Its important to use termQuery when building term queries because + * things like ParentFieldMapper override it to make more interesting + * queries. 
+ */ protected Term createTerm(Object value) { return new Term(name(), indexedValueForSearch(value)); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 25796c605f7..67ab567126e 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; + import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; import org.apache.lucene.index.IndexOptions; @@ -33,11 +34,11 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisService; @@ -59,7 +60,6 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.IdentityHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -78,9 +78,26 @@ import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; */ public class MapperService extends AbstractIndexComponent implements Closeable { + /** + * The reason why a mapping is being merged. + */ + public enum MergeReason { + /** + * Create or update a mapping. 
+ */ + MAPPING_UPDATE, + /** + * Recovery of an existing mapping, for instance because of a restart, + * if a shard was moved to a different node or for administrative + * purposes. + */ + MAPPING_RECOVERY; + } + public static final String DEFAULT_MAPPING = "_default_"; - public static final String INDEX_MAPPER_DYNAMIC_SETTING = "index.mapper.dynamic"; + public static final Setting INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING = Setting.longSetting("index.mapping.nested_fields.limit", 50l, 0, true, Setting.Scope.INDEX); public static final boolean INDEX_MAPPER_DYNAMIC_DEFAULT = true; + public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, false, Setting.Scope.INDEX); private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_all", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" @@ -108,8 +125,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { private final MapperAnalyzerWrapper searchAnalyzer; private final MapperAnalyzerWrapper searchQuoteAnalyzer; - private final List typeListeners = new CopyOnWriteArrayList<>(); - private volatile Map unmappedFieldTypes = emptyMap(); private volatile Set parentTypes = emptySet(); @@ -128,7 +143,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer()); this.mapperRegistry = mapperRegistry; - this.dynamic = this.indexSettings.getSettings().getAsBoolean(INDEX_MAPPER_DYNAMIC_SETTING, INDEX_MAPPER_DYNAMIC_DEFAULT); + this.dynamic = this.indexSettings.getValue(INDEX_MAPPER_DYNAMIC_SETTING); defaultPercolatorMappingSource = "{\n" + "\"_default_\":{\n" + "\"properties\" : {\n" + @@ -195,15 +210,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return this.documentParser; } - public void 
addTypeListener(DocumentTypeListener listener) { - typeListeners.add(listener); - } - - public void removeTypeListener(DocumentTypeListener listener) { - typeListeners.remove(listener); - } - - public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) { + public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason, boolean updateAllTypes) { if (DEFAULT_MAPPING.equals(type)) { // verify we can parse it // NOTE: never apply the default here @@ -221,14 +228,18 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return mapper; } else { synchronized (this) { - // only apply the default mapping if we don't have the type yet - applyDefault &= mappers.containsKey(type) == false; - return merge(parse(type, mappingSource, applyDefault), updateAllTypes); + final boolean applyDefault = + // the default was already applied if we are recovering + reason != MergeReason.MAPPING_RECOVERY + // only apply the default mapping if we don't have the type yet + && mappers.containsKey(type) == false; + DocumentMapper mergeWith = parse(type, mappingSource, applyDefault); + return merge(mergeWith, reason, updateAllTypes); } } } - private synchronized DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) { + private synchronized DocumentMapper merge(DocumentMapper mapper, MergeReason reason, boolean updateAllTypes) { if (mapper.type().length() == 0) { throw new InvalidTypeNameException("mapping type name is empty"); } @@ -281,6 +292,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } fullPathObjectMappers = Collections.unmodifiableMap(fullPathObjectMappers); + + if (reason == MergeReason.MAPPING_UPDATE) { + checkNestedFieldsLimit(fullPathObjectMappers); + } + Set parentTypes = this.parentTypes; if (oldMapper == null && newMapper.parentFieldMapper().active()) { parentTypes = new HashSet<>(parentTypes.size() + 1); 
@@ -309,14 +325,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { this.fullPathObjectMappers = fullPathObjectMappers; this.parentTypes = parentTypes; - // 5. send notifications about the change - if (oldMapper == null) { - // means the mapping was created - for (DocumentTypeListener typeListener : typeListeners) { - typeListener.beforeCreate(mapper); - } - } - assert assertSerialization(newMapper); assert assertMappersShareSameFieldType(); @@ -393,6 +401,19 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } } + private void checkNestedFieldsLimit(Map fullPathObjectMappers) { + long allowedNestedFields = indexSettings.getValue(INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING); + long actualNestedFields = 0; + for (ObjectMapper objectMapper : fullPathObjectMappers.values()) { + if (objectMapper.nested().isNested()) { + actualNestedFields++; + } + } + if (allowedNestedFields >= 0 && actualNestedFields > allowedNestedFields) { + throw new IllegalArgumentException("Limit of nested fields [" + allowedNestedFields + "] in index [" + index().name() + "] has been exceeded"); + } + } + public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException { String defaultMappingSource; if (PercolatorService.TYPE_NAME.equals(mappingType)) { @@ -442,105 +463,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return new DocumentMapperForType(mapper, mapper.mapping()); } - /** - * A filter for search. If a filter is required, will return it, otherwise, will return null. - */ - @Nullable - public Query searchFilter(String... 
types) { - boolean filterPercolateType = hasMapping(PercolatorService.TYPE_NAME); - if (types != null && filterPercolateType) { - for (String type : types) { - if (PercolatorService.TYPE_NAME.equals(type)) { - filterPercolateType = false; - break; - } - } - } - Query percolatorType = null; - if (filterPercolateType) { - percolatorType = documentMapper(PercolatorService.TYPE_NAME).typeFilter(); - } - - if (types == null || types.length == 0) { - if (hasNested && filterPercolateType) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(percolatorType, Occur.MUST_NOT); - bq.add(Queries.newNonNestedFilter(), Occur.MUST); - return new ConstantScoreQuery(bq.build()); - } else if (hasNested) { - return Queries.newNonNestedFilter(); - } else if (filterPercolateType) { - return new ConstantScoreQuery(Queries.not(percolatorType)); - } else { - return null; - } - } - // if we filter by types, we don't need to filter by non nested docs - // since they have different types (starting with __) - if (types.length == 1) { - DocumentMapper docMapper = documentMapper(types[0]); - Query filter = docMapper != null ? 
docMapper.typeFilter() : new TermQuery(new Term(TypeFieldMapper.NAME, types[0])); - if (filterPercolateType) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(percolatorType, Occur.MUST_NOT); - bq.add(filter, Occur.MUST); - return new ConstantScoreQuery(bq.build()); - } else { - return filter; - } - } - // see if we can use terms filter - boolean useTermsFilter = true; - for (String type : types) { - DocumentMapper docMapper = documentMapper(type); - if (docMapper == null) { - useTermsFilter = false; - break; - } - if (docMapper.typeMapper().fieldType().indexOptions() == IndexOptions.NONE) { - useTermsFilter = false; - break; - } - } - - // We only use terms filter is there is a type filter, this means we don't need to check for hasNested here - if (useTermsFilter) { - BytesRef[] typesBytes = new BytesRef[types.length]; - for (int i = 0; i < typesBytes.length; i++) { - typesBytes[i] = new BytesRef(types[i]); - } - TermsQuery termsFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes); - if (filterPercolateType) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(percolatorType, Occur.MUST_NOT); - bq.add(termsFilter, Occur.MUST); - return new ConstantScoreQuery(bq.build()); - } else { - return termsFilter; - } - } else { - BooleanQuery.Builder typesBool = new BooleanQuery.Builder(); - for (String type : types) { - DocumentMapper docMapper = documentMapper(type); - if (docMapper == null) { - typesBool.add(new TermQuery(new Term(TypeFieldMapper.NAME, type)), BooleanClause.Occur.SHOULD); - } else { - typesBool.add(docMapper.typeFilter(), BooleanClause.Occur.SHOULD); - } - } - BooleanQuery.Builder bool = new BooleanQuery.Builder(); - bool.add(typesBool.build(), Occur.MUST); - if (filterPercolateType) { - bool.add(percolatorType, BooleanClause.Occur.MUST_NOT); - } - if (hasNested) { - bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); - } - - return new ConstantScoreQuery(bool.build()); - } - } - /** * Returns the {@link 
MappedFieldType} for the give fullName. * @@ -603,33 +525,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable { return this.searchQuoteAnalyzer; } - /** - * Resolves the closest inherited {@link ObjectMapper} that is nested. - */ - public ObjectMapper resolveClosestNestedObjectMapper(String fieldName) { - int indexOf = fieldName.lastIndexOf('.'); - if (indexOf == -1) { - return null; - } else { - do { - String objectPath = fieldName.substring(0, indexOf); - ObjectMapper objectMapper = fullPathObjectMappers.get(objectPath); - if (objectMapper == null) { - indexOf = objectPath.lastIndexOf('.'); - continue; - } - - if (objectMapper.nested().isNested()) { - return objectMapper; - } - - indexOf = objectPath.lastIndexOf('.'); - } while (indexOf != -1); - } - - return null; - } - public Set getParentTypes() { return parentTypes; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index a0a5e5e5bce..5f829fdecea 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -51,6 +52,7 @@ import java.util.List; * */ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldMapper.IncludeInAll { + private static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", true, false, Setting.Scope.INDEX); // this is private since it has a different default public static class Defaults { 
@@ -89,7 +91,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM return new Explicit<>(ignoreMalformed, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false); + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); } return Defaults.IGNORE_MALFORMED; } @@ -104,7 +106,7 @@ public abstract class NumberFieldMapper extends FieldMapper implements AllFieldM return new Explicit<>(coerce, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); } return Defaults.COERCE; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java index 52202fac716..29a2aca7bec 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -59,7 +60,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; */ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser { public static final String CONTENT_TYPE = "geo_point"; - public static class Names { public static final String LAT = "lat"; 
public static final String LAT_SUFFIX = "." + LAT; @@ -142,7 +142,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr return new Explicit<>(ignoreMalformed, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false); + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); } return Defaults.IGNORE_MALFORMED; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java index 8c954c06a5e..c008be6f673 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java @@ -102,7 +102,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement return new Explicit<>(coerce, true); } if (context.indexSettings() != null) { - return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); } return Defaults.COERCE; } @@ -364,4 +364,4 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 2ea59393ca0..0de2cd2b60b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -137,7 +137,7 @@ public class GeoShapeFieldMapper extends FieldMapper { return new Explicit<>(coerce, true); } if (context.indexSettings() != null) { - return new 
Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false); + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); } return Defaults.COERCE; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index abb9178b875..86009ffcccc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -22,25 +22,28 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.DocValuesTermsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; 
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.QueryShardContext; import java.io.IOException; @@ -65,22 +68,13 @@ public class ParentFieldMapper extends MetadataFieldMapper { public static class Defaults { public static final String NAME = ParentFieldMapper.NAME; - public static final MappedFieldType FIELD_TYPE = new ParentFieldType(); - public static final MappedFieldType JOIN_FIELD_TYPE = new ParentFieldType(); + public static final ParentFieldType FIELD_TYPE = new ParentFieldType(); static { - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setStored(true); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setName(NAME); + FIELD_TYPE.setIndexOptions(IndexOptions.NONE); + FIELD_TYPE.setHasDocValues(true); + FIELD_TYPE.setDocValuesType(DocValuesType.SORTED); FIELD_TYPE.freeze(); - - JOIN_FIELD_TYPE.setHasDocValues(true); - JOIN_FIELD_TYPE.setDocValuesType(DocValuesType.SORTED); - JOIN_FIELD_TYPE.freeze(); } } @@ -88,17 +82,10 @@ public class ParentFieldMapper extends MetadataFieldMapper { private String parentType; - protected String indexName; - private final String documentType; - private final MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone(); - - private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone(); - public Builder(String documentType) { - super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); - this.indexName = name; + super(Defaults.NAME, new ParentFieldType(Defaults.FIELD_TYPE, documentType), Defaults.FIELD_TYPE); this.documentType = documentType; builder = this; } @@ -108,22 +95,14 @@ public class ParentFieldMapper extends MetadataFieldMapper { return builder; } - @Override - public Builder 
fieldDataSettings(Settings fieldDataSettings) { - Settings settings = Settings.builder().put(childJoinFieldType.fieldDataType().getSettings()).put(fieldDataSettings).build(); - childJoinFieldType.setFieldDataType(new FieldDataType(childJoinFieldType.fieldDataType().getType(), settings)); - return this; - } - @Override public ParentFieldMapper build(BuilderContext context) { if (parentType == null) { throw new MapperParsingException("[_parent] field mapping must contain the [type] option"); } - parentJoinFieldType.setName(joinField(documentType)); - parentJoinFieldType.setFieldDataType(null); - childJoinFieldType.setName(joinField(parentType)); - return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings()); + name = joinField(parentType); + setupFieldType(context); + return new ParentFieldMapper(createParentJoinFieldMapper(documentType, context), fieldType, parentType, context.indexSettings()); } } @@ -152,19 +131,40 @@ public class ParentFieldMapper extends MetadataFieldMapper { } @Override - public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String parentType) { - return new ParentFieldMapper(indexSettings, fieldType, parentType); + public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { + StringFieldMapper parentJoinField = createParentJoinFieldMapper(typeName, new BuilderContext(indexSettings, new ContentPath(0))); + MappedFieldType childJoinFieldType = Defaults.FIELD_TYPE.clone(); + childJoinFieldType.setName(joinField(null)); + return new ParentFieldMapper(parentJoinField, childJoinFieldType, null, indexSettings); } } + static StringFieldMapper createParentJoinFieldMapper(String docType, BuilderContext context) { + StringFieldMapper.Builder parentJoinField = MapperBuilders.stringField(joinField(docType)); + parentJoinField.indexOptions(IndexOptions.NONE); + parentJoinField.docValues(true); + 
parentJoinField.fieldType().setDocValuesType(DocValuesType.SORTED); + parentJoinField.fieldType().setFieldDataType(null); + return parentJoinField.build(context); + } + static final class ParentFieldType extends MappedFieldType { + final String documentType; + public ParentFieldType() { - setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE))); + setFieldDataType(new FieldDataType(NAME, settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE))); + documentType = null; } - protected ParentFieldType(ParentFieldType ref) { + ParentFieldType(ParentFieldType ref, String documentType) { super(ref); + this.documentType = documentType; + } + + private ParentFieldType(ParentFieldType ref) { + super(ref); + this.documentType = ref.documentType; } @Override @@ -177,30 +177,6 @@ public class ParentFieldMapper extends MetadataFieldMapper { return CONTENT_TYPE; } - @Override - public Uid value(Object value) { - if (value == null) { - return null; - } - return Uid.createUid(value.toString()); - } - - @Override - public Object valueForSearch(Object value) { - if (value == null) { - return null; - } - String sValue = value.toString(); - if (sValue == null) { - return null; - } - int index = sValue.indexOf(Uid.DELIMITER); - if (index == -1) { - return sValue; - } - return sValue.substring(index + 1); - } - /** * We don't need to analyzer the text, and we need to convert it to UID... 
*/ @@ -216,67 +192,30 @@ public class ParentFieldMapper extends MetadataFieldMapper { @Override public Query termsQuery(List values, @Nullable QueryShardContext context) { - if (context == null) { - return super.termsQuery(values, context); + BytesRef[] ids = new BytesRef[values.size()]; + for (int i = 0; i < ids.length; i++) { + ids[i] = indexedValueForSearch(values.get(i)); } - - List types = new ArrayList<>(context.getMapperService().types().size()); - for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) { - if (!documentMapper.parentFieldMapper().active()) { - types.add(documentMapper.type()); - } - } - - List bValues = new ArrayList<>(values.size()); - for (Object value : values) { - BytesRef bValue = BytesRefs.toBytesRef(value); - if (Uid.hasDelimiter(bValue)) { - bValues.add(bValue); - } else { - // we use all non child types, cause we don't know if its exact or not... - for (String type : types) { - bValues.add(Uid.createUidAsBytes(type, bValue)); - } - } - } - return new TermsQuery(name(), bValues); + BooleanQuery.Builder query = new BooleanQuery.Builder(); + query.add(new DocValuesTermsQuery(name(), ids), BooleanClause.Occur.MUST); + query.add(new TermQuery(new Term(TypeFieldMapper.NAME, documentType)), BooleanClause.Occur.FILTER); + return query.build(); } } private final String parentType; - // determines the field data settings - private MappedFieldType childJoinFieldType; - // has no impact of field data settings, is just here for creating a join field, the parent field mapper in the child type pointing to this type determines the field data settings for this join field - private final MappedFieldType parentJoinFieldType; + // has no impact of field data settings, is just here for creating a join field, + // the parent field mapper in the child type pointing to this type determines the field data settings for this join field + private final StringFieldMapper parentJoinField; - private ParentFieldMapper(MappedFieldType 
fieldType, MappedFieldType parentJoinFieldType, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) { - super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); + private ParentFieldMapper(StringFieldMapper parentJoinField, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) { + super(NAME, childJoinFieldType, Defaults.FIELD_TYPE, indexSettings); this.parentType = parentType; - this.parentJoinFieldType = parentJoinFieldType; - this.parentJoinFieldType.freeze(); - this.childJoinFieldType = childJoinFieldType; - if (childJoinFieldType != null) { - this.childJoinFieldType.freeze(); - } - } - - private ParentFieldMapper(Settings indexSettings, MappedFieldType existing, String parentType) { - this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), joinFieldTypeForParentType(parentType, indexSettings), null, null, indexSettings); - } - - private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) { - MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone(); - parentJoinFieldType.setName(joinField(parentType)); - parentJoinFieldType.freeze(); - return parentJoinFieldType; + this.parentJoinField = parentJoinField; } public MappedFieldType getParentJoinFieldType() { - return parentJoinFieldType; - } - - public MappedFieldType getChildJoinFieldType() { - return childJoinFieldType; + return parentJoinField.fieldType(); } public String type() { @@ -298,7 +237,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { protected void parseCreateField(ParseContext context, List fields) throws IOException { boolean parent = context.docMapper().isParent(context.type()); if (parent) { - addJoinFieldIfNeeded(fields, parentJoinFieldType, context.id()); + fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.id()))); } if (!active()) { @@ -309,8 +248,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { 
// we are in the parsing of _parent phase String parentId = context.parser().text(); context.sourceToParse().parent(parentId); - fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType())); - addJoinFieldIfNeeded(fields, childJoinFieldType, parentId); + fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId))); } else { // otherwise, we are running it post processing of the xcontent String parsedParentId = context.doc().get(Defaults.NAME); @@ -321,8 +259,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { throw new MapperParsingException("No parent id provided, not within the document, and not externally"); } // we did not add it in the parsing phase, add it now - fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType())); - addJoinFieldIfNeeded(fields, childJoinFieldType, parentId); + fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId))); } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) { throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]"); } @@ -331,12 +268,6 @@ public class ParentFieldMapper extends MetadataFieldMapper { // we have parent mapping, yet no value was set, ignore it... 
} - private void addJoinFieldIfNeeded(List fields, MappedFieldType fieldType, String id) { - if (fieldType.hasDocValues()) { - fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(id))); - } - } - public static String joinField(String parentType) { return ParentFieldMapper.NAME + "#" + parentType; } @@ -346,8 +277,9 @@ public class ParentFieldMapper extends MetadataFieldMapper { return CONTENT_TYPE; } - private boolean joinFieldHasCustomFieldDataSettings() { - return childJoinFieldType != null && childJoinFieldType.fieldDataType() != null && childJoinFieldType.fieldDataType().equals(Defaults.JOIN_FIELD_TYPE.fieldDataType()) == false; + @Override + public Iterator iterator() { + return Collections.singleton(parentJoinField).iterator(); } @Override @@ -360,12 +292,16 @@ public class ParentFieldMapper extends MetadataFieldMapper { builder.startObject(CONTENT_TYPE); builder.field("type", parentType); if (includeDefaults || joinFieldHasCustomFieldDataSettings()) { - builder.field("fielddata", (Map) childJoinFieldType.fieldDataType().getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap()); } builder.endObject(); return builder; } + private boolean joinFieldHasCustomFieldDataSettings() { + return fieldType != null && fieldType.fieldDataType() != null && fieldType.fieldDataType().equals(Defaults.FIELD_TYPE.fieldDataType()) == false; + } + @Override protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { super.doMerge(mergeWith, updateAllTypes); @@ -375,18 +311,13 @@ public class ParentFieldMapper extends MetadataFieldMapper { } List conflicts = new ArrayList<>(); - fieldType().checkCompatibility(fieldMergeWith.fieldType(), conflicts, true); // always strict, this cannot change - parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here - if (childJoinFieldType != null) { - // TODO: this can be set to false when the old parent/child impl is 
removed, we can do eager global ordinals loading per type. - childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false); - } + fieldType().checkCompatibility(fieldMergeWith.fieldType, conflicts, true); if (conflicts.isEmpty() == false) { throw new IllegalArgumentException("Merge conflicts: " + conflicts); } if (active()) { - childJoinFieldType = fieldMergeWith.childJoinFieldType.clone(); + fieldType = fieldMergeWith.fieldType.clone(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 9984463ffc0..c83428d2239 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -213,11 +213,24 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { if (value != null) { - long[] fromTo; + String term; if (value instanceof BytesRef) { - fromTo = Cidrs.cidrMaskToMinMax(((BytesRef) value).utf8ToString()); + term = ((BytesRef) value).utf8ToString(); } else { - fromTo = Cidrs.cidrMaskToMinMax(value.toString()); + term = value.toString(); + } + long[] fromTo; + // assume that the term is either a CIDR range or the + // term is a single IPv4 address; if either of these + // assumptions is wrong, the CIDR parsing will fail + // anyway, and that is okay + if (term.contains("/")) { + // treat the term as if it is in CIDR notation + fromTo = Cidrs.cidrMaskToMinMax(term); + } else { + // treat the term as if it is a single IPv4, and + // apply a CIDR mask equivalent to the host route + fromTo = Cidrs.cidrMaskToMinMax(term + "/32"); } if (fromTo != null) { return rangeQuery(fromTo[0] == 0 ? 
null : fromTo[0], diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java index 9a57ea57764..c4b2b06e0e0 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorFieldMapper.java @@ -119,7 +119,7 @@ public class PercolatorFieldMapper extends FieldMapper { this.queryShardContext = queryShardContext; this.queryTermsField = queryTermsField; this.unknownQueryField = unknownQueryField; - this.mapUnmappedFieldAsString = indexSettings.getAsBoolean(PercolatorQueriesRegistry.MAP_UNMAPPED_FIELDS_AS_STRING, false); + this.mapUnmappedFieldAsString = PercolatorQueriesRegistry.INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING.get(indexSettings); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java index 143616b7084..f55a739caf3 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/PercolatorQueriesRegistry.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -60,7 +61,7 @@ import java.util.concurrent.TimeUnit; */ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent implements Closeable { - public final static String MAP_UNMAPPED_FIELDS_AS_STRING = 
"index.percolator.map_unmapped_fields_as_string"; + public final static Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, false, Setting.Scope.INDEX); private final ConcurrentMap percolateQueries = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); private final QueryShardContext queryShardContext; @@ -72,7 +73,7 @@ public final class PercolatorQueriesRegistry extends AbstractIndexShardComponent public PercolatorQueriesRegistry(ShardId shardId, IndexSettings indexSettings, QueryShardContext queryShardContext) { super(shardId, indexSettings); this.queryShardContext = queryShardContext; - this.mapUnmappedFieldsAsString = this.indexSettings.getSettings().getAsBoolean(MAP_UNMAPPED_FIELDS_AS_STRING, false); + this.mapUnmappedFieldsAsString = indexSettings.getValue(INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING); } public ConcurrentMap getPercolateQueries() { diff --git a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index 79ba3804ecd..d2116ae3c05 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -42,7 +42,7 @@ import java.util.Objects; * Base class for all classes producing lucene queries. * Supports conversion to BytesReference and creation of lucene Query objects. */ -public abstract class AbstractQueryBuilder extends ToXContentToBytes implements QueryBuilder { +public abstract class AbstractQueryBuilder> extends ToXContentToBytes implements QueryBuilder { /** Default for boost to apply to resulting Lucene query. Defaults to 1.0*/ public static final float DEFAULT_BOOST = 1.0f; @@ -225,10 +225,10 @@ public abstract class AbstractQueryBuilder exte * their {@link QueryBuilder#toQuery(QueryShardContext)} method are not added to the * resulting collection. 
*/ - protected static Collection toQueries(Collection queryBuilders, QueryShardContext context) throws QueryShardException, + protected static Collection toQueries(Collection> queryBuilders, QueryShardContext context) throws QueryShardException, IOException { List queries = new ArrayList<>(queryBuilders.size()); - for (QueryBuilder queryBuilder : queryBuilders) { + for (QueryBuilder queryBuilder : queryBuilders) { Query query = queryBuilder.toQuery(context); if (query != null) { queries.add(query); @@ -243,15 +243,15 @@ public abstract class AbstractQueryBuilder exte return getWriteableName(); } - protected final void writeQueries(StreamOutput out, List queries) throws IOException { + protected final void writeQueries(StreamOutput out, List> queries) throws IOException { out.writeVInt(queries.size()); - for (QueryBuilder query : queries) { + for (QueryBuilder query : queries) { out.writeQuery(query); } } - protected final List readQueries(StreamInput in) throws IOException { - List queries = new ArrayList<>(); + protected final List> readQueries(StreamInput in) throws IOException { + List> queries = new ArrayList<>(); int size = in.readVInt(); for (int i = 0; i < size; i++) { queries.add(in.readQuery()); diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index 69ee2a81061..f7f4926d950 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -49,13 +49,13 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { static final BoolQueryBuilder PROTOTYPE = new BoolQueryBuilder(); - private final List mustClauses = new ArrayList<>(); + private final List> mustClauses = new ArrayList<>(); - private final List mustNotClauses = new ArrayList<>(); + private final List> mustNotClauses = new ArrayList<>(); - private final List filterClauses = new ArrayList<>(); 
+ private final List> filterClauses = new ArrayList<>(); - private final List shouldClauses = new ArrayList<>(); + private final List> shouldClauses = new ArrayList<>(); private boolean disableCoord = DISABLE_COORD_DEFAULT; @@ -67,7 +67,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must appear in the matching documents and will * contribute to scoring. No null value allowed. */ - public BoolQueryBuilder must(QueryBuilder queryBuilder) { + public BoolQueryBuilder must(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -78,7 +78,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { /** * Gets the queries that must appear in the matching documents. */ - public List must() { + public List> must() { return this.mustClauses; } @@ -86,7 +86,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must appear in the matching documents but will * not contribute to scoring. No null value allowed. */ - public BoolQueryBuilder filter(QueryBuilder queryBuilder) { + public BoolQueryBuilder filter(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -95,9 +95,9 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { } /** - * Gets the queries that must appear in the matching documents but don't conntribute to scoring + * Gets the queries that must appear in the matching documents but don't contribute to scoring */ - public List filter() { + public List> filter() { return this.filterClauses; } @@ -105,7 +105,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * Adds a query that must not appear in the matching documents. * No null value allowed. 
*/ - public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) { + public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -116,7 +116,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { /** * Gets the queries that must not appear in the matching documents. */ - public List mustNot() { + public List> mustNot() { return this.mustNotClauses; } @@ -127,7 +127,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * * @see #minimumNumberShouldMatch(int) */ - public BoolQueryBuilder should(QueryBuilder queryBuilder) { + public BoolQueryBuilder should(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -141,7 +141,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { * @see #should(QueryBuilder) * @see #minimumNumberShouldMatch(int) */ - public List should() { + public List> should() { return this.shouldClauses; } @@ -244,12 +244,12 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { builder.endObject(); } - private static void doXArrayContent(String field, List clauses, XContentBuilder builder, Params params) throws IOException { + private static void doXArrayContent(String field, List> clauses, XContentBuilder builder, Params params) throws IOException { if (clauses.isEmpty()) { return; } builder.startArray(field); - for (QueryBuilder clause : clauses) { + for (QueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -282,8 +282,8 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { return adjustPureNegative ? 
fixNegativeQueryIfNeeded(query) : query; } - private static void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List clauses, Occur occurs) throws IOException { - for (QueryBuilder query : clauses) { + private static void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List> clauses, Occur occurs) throws IOException { + for (QueryBuilder query : clauses) { Query luceneQuery = null; switch (occurs) { case MUST: @@ -321,7 +321,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { @Override protected BoolQueryBuilder doReadFrom(StreamInput in) throws IOException { BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); - List queryBuilders = readQueries(in); + List> queryBuilders = readQueries(in); boolQueryBuilder.mustClauses.addAll(queryBuilders); queryBuilders = readQueries(in); boolQueryBuilder.mustNotClauses.addAll(queryBuilders); diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java index d0d130f9774..53c1b81989f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java @@ -19,17 +19,14 @@ package org.elasticsearch.index.query; -import org.apache.lucene.search.BooleanQuery; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentParser; - import java.io.IOException; import java.util.ArrayList; import java.util.List; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; + /** * Parser for bool query */ @@ -45,11 +42,6 @@ public class BoolQueryParser implements QueryParser { public static 
final ParseField MINIMUM_NUMBER_SHOULD_MATCH = new ParseField("minimum_number_should_match"); public static final ParseField ADJUST_PURE_NEGATIVE = new ParseField("adjust_pure_negative"); - @Inject - public BoolQueryParser(Settings settings) { - BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count", settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount()))); - } - @Override public String[] names() { return new String[]{BoolQueryBuilder.NAME}; diff --git a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java index 43b1917d318..e682ae2c1c0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/DisMaxQueryBuilder.java @@ -40,7 +40,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder public static final String NAME = "dis_max"; - private final ArrayList queries = new ArrayList<>(); + private final List> queries = new ArrayList<>(); /** Default multiplication factor for breaking ties in document scores.*/ public static float DEFAULT_TIE_BREAKER = 0.0f; @@ -51,7 +51,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder /** * Add a sub-query to this disjunction. 
*/ - public DisMaxQueryBuilder add(QueryBuilder queryBuilder) { + public DisMaxQueryBuilder add(QueryBuilder queryBuilder) { if (queryBuilder == null) { throw new IllegalArgumentException("inner dismax query clause cannot be null"); } @@ -62,7 +62,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder /** * @return an immutable list copy of the current sub-queries of this disjunction */ - public List innerQueries() { + public List> innerQueries() { return this.queries; } @@ -90,7 +90,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder builder.startObject(NAME); builder.field(DisMaxQueryParser.TIE_BREAKER_FIELD.getPreferredName(), tieBreaker); builder.startArray(DisMaxQueryParser.QUERIES_FIELD.getPreferredName()); - for (QueryBuilder queryBuilder : queries) { + for (QueryBuilder queryBuilder : queries) { queryBuilder.toXContent(builder, params); } builder.endArray(); @@ -112,7 +112,7 @@ public class DisMaxQueryBuilder extends AbstractQueryBuilder @Override protected DisMaxQueryBuilder doReadFrom(StreamInput in) throws IOException { DisMaxQueryBuilder disMax = new DisMaxQueryBuilder(); - List queryBuilders = readQueries(in); + List> queryBuilders = readQueries(in); disMax.queries.addAll(queryBuilders); disMax.tieBreaker = in.readFloat(); return disMax; diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index d9a99cc50cb..9184281607d 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -372,7 +372,10 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { return null; } - if (query instanceof BooleanQuery) { + // If the coordination factor is disabled on a boolean query we don't apply the minimum should match. 
+ // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word + // and multiple variations of the same word in the query (synonyms for instance). + if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) { query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); } else if (query instanceof ExtendedCommonTermsQuery) { ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); diff --git a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java index 7cdb66bd126..ba5d7c2447e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/NestedQueryParser.java @@ -19,15 +19,14 @@ package org.elasticsearch.index.query; +import java.io.IOException; + import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.support.QueryInnerHits; -import java.io.IOException; - public class NestedQueryParser implements QueryParser { private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE); @@ -38,7 +37,7 @@ public class NestedQueryParser implements QueryParser { @Override public String[] names() { - return new String[]{NestedQueryBuilder.NAME, Strings.toCamelCase(NestedQueryBuilder.NAME)}; + return new String[]{NestedQueryBuilder.NAME}; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java new file mode 100644 index 00000000000..f9bd7623f35 --- /dev/null +++ 
b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryBuilder.java @@ -0,0 +1,104 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.DocValuesTermsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; + +import java.io.IOException; +import java.util.Objects; + +public final class ParentIdQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "parent_id"; + static final ParentIdQueryBuilder PROTO = new ParentIdQueryBuilder(null, null); + + private final String type; + private final String id; + + public ParentIdQueryBuilder(String type, String id) { + this.type = type; + this.id = id; + } + + public String getType() { + return type; + } + + public String getId() { + return id; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + 
builder.field(ParentIdQueryParser.TYPE_FIELD.getPreferredName(), type); + builder.field(ParentIdQueryParser.ID_FIELD.getPreferredName(), id); + printBoostAndQueryName(builder); + builder.endObject(); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + DocumentMapper childDocMapper = context.getMapperService().documentMapper(type); + if (childDocMapper == null) { + throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]"); + } + ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper(); + if (parentFieldMapper.active() == false) { + throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured"); + } + String fieldName = ParentFieldMapper.joinField(parentFieldMapper.type()); + return new DocValuesTermsQuery(fieldName, id); + } + + @Override + protected ParentIdQueryBuilder doReadFrom(StreamInput in) throws IOException { + String type = in.readString(); + String id = in.readString(); + return new ParentIdQueryBuilder(type, id); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeString(type); + out.writeString(id); + } + + @Override + protected boolean doEquals(ParentIdQueryBuilder that) { + return Objects.equals(type, that.type) && Objects.equals(id, that.id); + } + + @Override + protected int doHashCode() { + return Objects.hash(type, id); + } + + @Override + public String getWriteableName() { + return NAME; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java new file mode 100644 index 00000000000..43788624c5e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/query/ParentIdQueryParser.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +public final class ParentIdQueryParser implements QueryParser { + + public static final ParseField ID_FIELD = new ParseField("id"); + public static final ParseField TYPE_FIELD = new ParseField("type", "child_type"); + + @Override + public String[] names() { + return new String[]{ParentIdQueryBuilder.NAME}; + } + + @Override + public ParentIdQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + String type = null; + String id = null; + String queryName = null; + String currentFieldName = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) { + type = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, ID_FIELD)) { + id = 
parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { + boost = parser.floatValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { + queryName = parser.text(); + } else { + throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]"); + } + } else { + throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]"); + } + } + ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder(type, id); + queryBuilder.queryName(queryName); + queryBuilder.boost(boost); + return queryBuilder; + } + + @Override + public ParentIdQueryBuilder getBuilderPrototype() { + return ParentIdQueryBuilder.PROTO; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java index 2fde316a561..b75406c8641 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilder.java @@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import java.io.IOException; -public interface QueryBuilder extends NamedWriteable, ToXContent { +public interface QueryBuilder> extends NamedWriteable, ToXContent { /** * Converts this QueryBuilder to a lucene {@link Query}. 
diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 6e9c86b4c6a..893c97f332b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -490,6 +490,14 @@ public abstract class QueryBuilders { return new HasParentQueryBuilder(type, query); } + /** + * Constructs a new parent id query that returns all child documents of the specified type that + * point to the specified id. + */ + public static ParentIdQueryBuilder parentId(String type, String id) { + return new ParentIdQueryBuilder(type, id); + } + public static NestedQueryBuilder nestedQuery(String path, QueryBuilder query) { return new NestedQueryBuilder(path, query); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 59e04e40951..fcab39b96e0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -735,7 +735,10 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder clauses = new ArrayList<>(); + private final List> clauses = new ArrayList<>(); private final int slop; @@ -55,7 +55,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder initialClause, int slop) { if (initialClause == null) { throw new IllegalArgumentException("query must include at least one clause"); } @@ -70,7 +70,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clause) { if (clause == null) { throw new IllegalArgumentException("query clauses cannot be null"); } @@ -81,7 +81,7 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses() { + public List> clauses() { return this.clauses; } @@ -106,7 +106,7 @@ public class SpanNearQueryBuilder 
extends AbstractQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -129,10 +129,10 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder clauses = readQueries(in); - SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder((SpanQueryBuilder)clauses.get(0), in.readVInt()); + List> clauses = readQueries(in); + SpanNearQueryBuilder queryBuilder = new SpanNearQueryBuilder((SpanQueryBuilder)clauses.get(0), in.readVInt()); for (int i = 1; i < clauses.size(); i++) { - queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); + queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); } queryBuilder.inOrder = in.readBoolean(); return queryBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java index 3b8681c685b..8d1fb4f7ff3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanOrQueryBuilder.java @@ -38,18 +38,18 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder public static final String NAME = "span_or"; - private final List clauses = new ArrayList<>(); + private final List> clauses = new ArrayList<>(); static final SpanOrQueryBuilder PROTOTYPE = new SpanOrQueryBuilder(SpanTermQueryBuilder.PROTOTYPE); - public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { + public SpanOrQueryBuilder(SpanQueryBuilder initialClause) { if (initialClause == null) { throw new IllegalArgumentException("query must include at least one clause"); } clauses.add(initialClause); } - public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { + public SpanOrQueryBuilder clause(SpanQueryBuilder clause) { if (clause == null) { throw new IllegalArgumentException("inner bool query clause cannot be null"); } @@ -60,7 +60,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder /** * @return the {@link 
SpanQueryBuilder} clauses that were set for this query */ - public List clauses() { + public List> clauses() { return this.clauses; } @@ -68,7 +68,7 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); builder.startArray(SpanOrQueryParser.CLAUSES_FIELD.getPreferredName()); - for (SpanQueryBuilder clause : clauses) { + for (SpanQueryBuilder clause : clauses) { clause.toXContent(builder, params); } builder.endArray(); @@ -89,10 +89,10 @@ public class SpanOrQueryBuilder extends AbstractQueryBuilder @Override protected SpanOrQueryBuilder doReadFrom(StreamInput in) throws IOException { - List clauses = readQueries(in); - SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder((SpanQueryBuilder)clauses.get(0)); + List> clauses = readQueries(in); + SpanOrQueryBuilder queryBuilder = new SpanOrQueryBuilder((SpanQueryBuilder)clauses.get(0)); for (int i = 1; i < clauses.size(); i++) { - queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); + queryBuilder.clauses.add((SpanQueryBuilder)clauses.get(i)); } return queryBuilder; diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java index d35dcbc536a..90a75a5af1b 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanQueryBuilder.java @@ -22,6 +22,6 @@ package org.elasticsearch.index.query; /** * Marker interface for a specific type of {@link QueryBuilder} that allows to build span queries */ -public interface SpanQueryBuilder extends QueryBuilder { +public interface SpanQueryBuilder> extends QueryBuilder { } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java index 
6822ab3e240..8e503e24d3c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java @@ -19,10 +19,13 @@ package org.elasticsearch.index.query.functionscore; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; @@ -36,10 +39,6 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - /** * Parser for function_score query */ @@ -54,7 +53,6 @@ public class FunctionScoreQueryParser implements QueryParser> functionParsers; - @Inject - public ScoreFunctionParserMapper(Set parsers, NamedWriteableRegistry namedWriteableRegistry) { - Map> map = new HashMap<>(); - // built-in parsers - addParser(new ScriptScoreFunctionParser(), map, namedWriteableRegistry); - addParser(new GaussDecayFunctionParser(), map, namedWriteableRegistry); - addParser(new LinearDecayFunctionParser(), map, namedWriteableRegistry); - addParser(new ExponentialDecayFunctionParser(), map, namedWriteableRegistry); - addParser(new RandomScoreFunctionParser(), map, namedWriteableRegistry); - addParser(new FieldValueFactorFunctionParser(), map, namedWriteableRegistry); - for (ScoreFunctionParser scoreFunctionParser : parsers) { - addParser(scoreFunctionParser, map, namedWriteableRegistry); - } - this.functionParsers = 
Collections.unmodifiableMap(map); - //weight doesn't have its own parser, so every function supports it out of the box. - //Can be a single function too when not associated to any other function, which is why it needs to be registered manually here. - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, new WeightBuilder()); + public ScoreFunctionParserMapper(Map> functionParsers) { + this.functionParsers = unmodifiableMap(functionParsers); } - public ScoreFunctionParser get(XContentLocation contentLocation, String parserName) { - ScoreFunctionParser functionParser = get(parserName); + public ScoreFunctionParser get(XContentLocation contentLocation, String parserName) { + ScoreFunctionParser functionParser = get(parserName); if (functionParser == null) { throw new ParsingException(contentLocation, "No function with the name [" + parserName + "] is registered."); } return functionParser; } - private ScoreFunctionParser get(String parserName) { + private ScoreFunctionParser get(String parserName) { return functionParsers.get(parserName); } - - private static void addParser(ScoreFunctionParser scoreFunctionParser, Map> map, NamedWriteableRegistry namedWriteableRegistry) { - for (String name : scoreFunctionParser.getNames()) { - map.put(name, scoreFunctionParser); - - } - @SuppressWarnings("unchecked") NamedWriteable sfb = scoreFunctionParser.getBuilderPrototype(); - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, sfb); - } } diff --git a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java index 1b213645ae5..979bfba605f 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -212,10 +212,6 @@ public class MatchQuery { this.zeroTermsQuery = zeroTermsQuery; } - protected boolean forceAnalyzeQueryString() { - return false; - } - protected Analyzer 
getAnalyzer(MappedFieldType fieldType) { if (this.analyzer == null) { if (fieldType != null) { @@ -240,17 +236,19 @@ public class MatchQuery { field = fieldName; } - if (fieldType != null && fieldType.useTermQueryWithQueryString() && !forceAnalyzeQueryString()) { - try { - return fieldType.termQuery(value, context); - } catch (RuntimeException e) { - if (lenient) { - return null; - } - throw e; - } - + /* + * If the user forced an analyzer we really don't care if they are + * searching a type that wants term queries to be used with query string + * because the QueryBuilder will take care of it. If they haven't forced + * an analyzer then types like NumberFieldType that want terms with + * query string will blow up because their analyzer isn't capable of + * passing through QueryBuilder. + */ + boolean noForcedAnalyzer = this.analyzer == null; + if (fieldType != null && fieldType.useTermQueryWithQueryString() && noForcedAnalyzer) { + return termQuery(fieldType, value); } + Analyzer analyzer = getAnalyzer(fieldType); assert analyzer != null; MatchQueryBuilder builder = new MatchQueryBuilder(analyzer, fieldType); @@ -282,6 +280,26 @@ public class MatchQuery { } } + /** + * Creates a TermQuery-like-query for MappedFieldTypes that don't support + * QueryBuilder which is very string-ish. Just delegates to the + * MappedFieldType for MatchQuery but gets more complex for blended queries. + */ + protected Query termQuery(MappedFieldType fieldType, Object value) { + return termQuery(fieldType, value, lenient); + } + + protected final Query termQuery(MappedFieldType fieldType, Object value, boolean lenient) { + try { + return fieldType.termQuery(value, context); + } catch (RuntimeException e) { + if (lenient) { + return null; + } + throw e; + } + } + protected Query zeroTermsQuery() { return zeroTermsQuery == DEFAULT_ZERO_TERMS_QUERY ? 
Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery(); } @@ -289,20 +307,20 @@ public class MatchQuery { private class MatchQueryBuilder extends QueryBuilder { private final MappedFieldType mapper; + /** * Creates a new QueryBuilder using the given analyzer. */ public MatchQueryBuilder(Analyzer analyzer, @Nullable MappedFieldType mapper) { super(analyzer); this.mapper = mapper; - } + } @Override protected Query newTermQuery(Term term) { return blendTermQuery(term, mapper); } - public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) { final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop); final MultiPhrasePrefixQuery prefixQuery = new MultiPhrasePrefixQuery(); @@ -352,11 +370,16 @@ public class MatchQuery { protected Query blendTermQuery(Term term, MappedFieldType fieldType) { if (fuzziness != null) { if (fieldType != null) { - Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); - if (query instanceof FuzzyQuery) { - QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); + try { + Query query = fieldType.fuzzyQuery(term.text(), fuzziness, fuzzyPrefixLength, maxExpansions, transpositions); + if (query instanceof FuzzyQuery) { + QueryParsers.setRewriteMethod((FuzzyQuery) query, fuzzyRewriteMethod); + } + return query; + } catch (RuntimeException e) { + return new TermQuery(term); + // See long comment below about why we're lenient here. } - return query; } int edits = fuzziness.asDistance(term.text()); FuzzyQuery query = new FuzzyQuery(term, edits, fuzzyPrefixLength, maxExpansions, transpositions); @@ -364,9 +387,25 @@ public class MatchQuery { return query; } if (fieldType != null) { - Query termQuery = fieldType.queryStringTermQuery(term); - if (termQuery != null) { - return termQuery; + /* + * Its a bit weird to default to lenient here but its the backwards + * compatible. 
It makes some sense when you think about what we are + * doing here: at this point the user has forced an analyzer and + * passed some string to the match query. We cut it up using the + * analyzer and then tried to cram whatever we get into the field. + * lenient=true here means that we try the terms in the query and on + * the off chance that they are actually valid terms then we + * actually try them. lenient=false would mean that we blow up the + * query if they aren't valid terms. "valid" in this context means + * "parses properly to something of the type being queried." So "1" + * is a valid number, etc. + * + * We use the text form here because we we've received the term from + * an analyzer that cut some string into text. + */ + Query query = termQuery(fieldType, term.bytes(), true); + if (query != null) { + return query; } } return new TermQuery(term); diff --git a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 7e7118ba329..dbe4de5ad86 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.mapper.MappedFieldType; @@ -55,7 +54,10 @@ public class MultiMatchQuery extends MatchQuery { private Query parseAndApply(Type type, String fieldName, Object value, String minimumShouldMatch, Float boostValue) throws IOException { Query query = parse(type, fieldName, value); - if (query instanceof BooleanQuery) { + // If the coordination factor is disabled on a boolean query we don't apply the 
minimum should match. + // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word + // and multiple variations of the same word in the query (synonyms for instance). + if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) { query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); } if (query != null && boostValue != null && boostValue != AbstractQueryBuilder.DEFAULT_BOOST) { @@ -104,7 +106,7 @@ public class MultiMatchQuery extends MatchQuery { this.tieBreaker = tieBreaker; } - public List buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, Object value, String minimumShouldMatch) throws IOException{ + public List buildGroupedQueries(MultiMatchQueryBuilder.Type type, Map fieldNames, Object value, String minimumShouldMatch) throws IOException{ List queries = new ArrayList<>(); for (String fieldName : fieldNames.keySet()) { Float boostValue = fieldNames.get(fieldName); @@ -146,8 +148,8 @@ public class MultiMatchQuery extends MatchQuery { return MultiMatchQuery.super.blendTermQuery(term, fieldType); } - public boolean forceAnalyzeQueryString() { - return false; + public Query termQuery(MappedFieldType fieldType, Object value) { + return MultiMatchQuery.this.termQuery(fieldType, value, lenient); } } @@ -196,8 +198,13 @@ public class MultiMatchQuery extends MatchQuery { } else { blendedFields = null; } - final FieldAndFieldType fieldAndFieldType = group.get(0); - Query q = parseGroup(type.matchQueryType(), fieldAndFieldType.field, 1f, value, minimumShouldMatch); + /* + * We have to pick some field to pass through the superclass so + * we just pick the first field. It shouldn't matter because + * fields are already grouped by their analyzers/types. 
+ */ + String representativeField = group.get(0).field; + Query q = parseGroup(type.matchQueryType(), representativeField, 1f, value, minimumShouldMatch); if (q != null) { queries.add(q); } @@ -206,11 +213,6 @@ public class MultiMatchQuery extends MatchQuery { return queries.isEmpty() ? null : queries; } - @Override - public boolean forceAnalyzeQueryString() { - return blendedFields != null; - } - @Override public Query blendTerm(Term term, MappedFieldType fieldType) { if (blendedFields == null) { @@ -231,6 +233,16 @@ public class MultiMatchQuery extends MatchQuery { } return BlendedTermQuery.dismaxBlendedQuery(terms, blendedBoost, tieBreaker); } + + @Override + public Query termQuery(MappedFieldType fieldType, Object value) { + /* + * Use the string value of the term because we're reusing the + * portion of the query is usually after the analyzer has run on + * each term. We just skip that analyzer phase. + */ + return blendTerm(new Term(fieldType.name(), value.toString()), fieldType); + } } @Override @@ -241,6 +253,15 @@ public class MultiMatchQuery extends MatchQuery { return queryBuilder.blendTerm(term, fieldType); } + @Override + protected Query termQuery(MappedFieldType fieldType, Object value) { + if (queryBuilder == null) { + // Can be null when the MultiMatchQuery collapses into a MatchQuery + return super.termQuery(fieldType, value); + } + return queryBuilder.termQuery(fieldType, value); + } + private static final class FieldAndFieldType { final String field; final MappedFieldType fieldType; @@ -255,18 +276,17 @@ public class MultiMatchQuery extends MatchQuery { public Term newTerm(String value) { try { - final BytesRef bytesRef = fieldType.indexedValueForSearch(value); - return new Term(field, bytesRef); - } catch (Exception ex) { + /* + * Note that this ignore any overrides the fieldType might do + * for termQuery, meaning things like _parent won't work here. 
+ */ + return new Term(fieldType.name(), fieldType.indexedValueForSearch(value)); + } catch (RuntimeException ex) { // we can't parse it just use the incoming value -- it will // just have a DF of 0 at the end of the day and will be ignored + // Note that this is like lenient = true allways } return new Term(field, value); } } - - @Override - protected boolean forceAnalyzeQueryString() { - return this.queryBuilder == null ? super.forceAnalyzeQueryString() : this.queryBuilder.forceAnalyzeQueryString(); - } } diff --git a/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java b/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java deleted file mode 100644 index f3089b9a5f7..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/stats/SearchSlowLog.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.search.stats; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.search.internal.SearchContext; - -import java.util.Locale; -import java.util.concurrent.TimeUnit; - -/** - */ -public final class SearchSlowLog { - - private boolean reformat; - - private long queryWarnThreshold; - private long queryInfoThreshold; - private long queryDebugThreshold; - private long queryTraceThreshold; - - private long fetchWarnThreshold; - private long fetchInfoThreshold; - private long fetchDebugThreshold; - private long fetchTraceThreshold; - - private String level; - - private final ESLogger queryLogger; - private final ESLogger fetchLogger; - - private static final String INDEX_SEARCH_SLOWLOG_PREFIX = "index.search.slowlog"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.warn"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.info"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.debug"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.query.trace"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.warn"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.info"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG = INDEX_SEARCH_SLOWLOG_PREFIX + ".threshold.fetch.debug"; - public static final String INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE = INDEX_SEARCH_SLOWLOG_PREFIX + 
".threshold.fetch.trace"; - public static final String INDEX_SEARCH_SLOWLOG_REFORMAT = INDEX_SEARCH_SLOWLOG_PREFIX + ".reformat"; - public static final String INDEX_SEARCH_SLOWLOG_LEVEL = INDEX_SEARCH_SLOWLOG_PREFIX + ".level"; - - public SearchSlowLog(Settings indexSettings) { - - this.reformat = indexSettings.getAsBoolean(INDEX_SEARCH_SLOWLOG_REFORMAT, true); - - this.queryWarnThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, TimeValue.timeValueNanos(-1)).nanos(); - this.queryInfoThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, TimeValue.timeValueNanos(-1)).nanos(); - this.queryDebugThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, TimeValue.timeValueNanos(-1)).nanos(); - this.queryTraceThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, TimeValue.timeValueNanos(-1)).nanos(); - - this.fetchWarnThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, TimeValue.timeValueNanos(-1)).nanos(); - this.fetchInfoThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, TimeValue.timeValueNanos(-1)).nanos(); - this.fetchDebugThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, TimeValue.timeValueNanos(-1)).nanos(); - this.fetchTraceThreshold = indexSettings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, TimeValue.timeValueNanos(-1)).nanos(); - - this.level = indexSettings.get(INDEX_SEARCH_SLOWLOG_LEVEL, "TRACE").toUpperCase(Locale.ROOT); - - this.queryLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); - this.fetchLogger = Loggers.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); - - queryLogger.setLevel(level); - fetchLogger.setLevel(level); - } - - void onQueryPhase(SearchContext context, long tookInNanos) { - if (queryWarnThreshold >= 0 && tookInNanos > queryWarnThreshold) { - queryLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, 
reformat)); - } else if (queryInfoThreshold >= 0 && tookInNanos > queryInfoThreshold) { - queryLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (queryDebugThreshold >= 0 && tookInNanos > queryDebugThreshold) { - queryLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (queryTraceThreshold >= 0 && tookInNanos > queryTraceThreshold) { - queryLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } - } - - void onFetchPhase(SearchContext context, long tookInNanos) { - if (fetchWarnThreshold >= 0 && tookInNanos > fetchWarnThreshold) { - fetchLogger.warn("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (fetchInfoThreshold >= 0 && tookInNanos > fetchInfoThreshold) { - fetchLogger.info("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (fetchDebugThreshold >= 0 && tookInNanos > fetchDebugThreshold) { - fetchLogger.debug("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } else if (fetchTraceThreshold >= 0 && tookInNanos > fetchTraceThreshold) { - fetchLogger.trace("{}", new SlowLogSearchContextPrinter(context, tookInNanos, reformat)); - } - } - - public void onRefreshSettings(Settings settings) { - long queryWarnThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN, TimeValue.timeValueNanos(this.queryWarnThreshold)).nanos(); - if (queryWarnThreshold != this.queryWarnThreshold) { - this.queryWarnThreshold = queryWarnThreshold; - } - long queryInfoThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO, TimeValue.timeValueNanos(this.queryInfoThreshold)).nanos(); - if (queryInfoThreshold != this.queryInfoThreshold) { - this.queryInfoThreshold = queryInfoThreshold; - } - long queryDebugThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG, TimeValue.timeValueNanos(this.queryDebugThreshold)).nanos(); - if 
(queryDebugThreshold != this.queryDebugThreshold) { - this.queryDebugThreshold = queryDebugThreshold; - } - long queryTraceThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE, TimeValue.timeValueNanos(this.queryTraceThreshold)).nanos(); - if (queryTraceThreshold != this.queryTraceThreshold) { - this.queryTraceThreshold = queryTraceThreshold; - } - - long fetchWarnThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN, TimeValue.timeValueNanos(this.fetchWarnThreshold)).nanos(); - if (fetchWarnThreshold != this.fetchWarnThreshold) { - this.fetchWarnThreshold = fetchWarnThreshold; - } - long fetchInfoThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO, TimeValue.timeValueNanos(this.fetchInfoThreshold)).nanos(); - if (fetchInfoThreshold != this.fetchInfoThreshold) { - this.fetchInfoThreshold = fetchInfoThreshold; - } - long fetchDebugThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG, TimeValue.timeValueNanos(this.fetchDebugThreshold)).nanos(); - if (fetchDebugThreshold != this.fetchDebugThreshold) { - this.fetchDebugThreshold = fetchDebugThreshold; - } - long fetchTraceThreshold = settings.getAsTime(INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE, TimeValue.timeValueNanos(this.fetchTraceThreshold)).nanos(); - if (fetchTraceThreshold != this.fetchTraceThreshold) { - this.fetchTraceThreshold = fetchTraceThreshold; - } - - String level = settings.get(INDEX_SEARCH_SLOWLOG_LEVEL, this.level); - if (!level.equals(this.level)) { - this.queryLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.fetchLogger.setLevel(level.toUpperCase(Locale.ROOT)); - this.level = level; - } - - boolean reformat = settings.getAsBoolean(INDEX_SEARCH_SLOWLOG_REFORMAT, this.reformat); - if (reformat != this.reformat) { - this.reformat = reformat; - } - } - - private static class SlowLogSearchContextPrinter { - private final SearchContext context; - private final long tookInNanos; - private final boolean reformat; - - 
public SlowLogSearchContextPrinter(SearchContext context, long tookInNanos, boolean reformat) { - this.context = context; - this.tookInNanos = tookInNanos; - this.reformat = reformat; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); - if (context.types() == null) { - sb.append("types[], "); - } else { - sb.append("types["); - Strings.arrayToDelimitedString(context.types(), ",", sb); - sb.append("], "); - } - if (context.groupStats() == null) { - sb.append("stats[], "); - } else { - sb.append("stats["); - Strings.collectionToDelimitedString(context.groupStats(), ",", "", "", sb); - sb.append("], "); - } - sb.append("search_type[").append(context.searchType()).append("], total_shards[").append(context.numberOfShards()).append("], "); - if (context.request().source() != null) { - sb.append("source[").append(context.request().source()).append("], "); - } else { - sb.append("source[], "); - } - return sb.toString(); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java b/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java index d65b19e56df..1a155d17964 100644 --- a/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java +++ b/core/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.SearchSlowLog; import org.elasticsearch.search.internal.SearchContext; import java.util.HashMap; @@ -179,10 +180,6 @@ public final class ShardSearchStats { totalStats.scrollMetric.inc(System.nanoTime() - context.getOriginNanoTime()); 
} - public void onRefreshSettings(Settings settings) { - slowLogSearchService.onRefreshSettings(settings); - } - final static class StatsHolder { public final MeanMetric queryMetric = new MeanMetric(); public final MeanMetric fetchMetric = new MeanMetric(); diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index bdbb12245e1..73ba9d04003 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -91,7 +91,7 @@ import org.elasticsearch.index.percolator.PercolatorQueriesRegistry; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; -import org.elasticsearch.index.search.stats.SearchSlowLog; +import org.elasticsearch.index.SearchSlowLog; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.search.stats.ShardSearchStats; import org.elasticsearch.index.similarity.SimilarityService; @@ -237,13 +237,13 @@ public class IndexShard extends AbstractIndexShardComponent { /* create engine config */ logger.debug("state: [CREATED]"); - this.checkIndexOnStartup = settings.get("index.shard.check_on_startup", "false"); + this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, provider.getBigArrays()); final QueryCachingPolicy cachingPolicy; // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis - if (settings.getAsBoolean(IndexModule.QUERY_CACHE_EVERYTHING, false)) { + if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE; } else { cachingPolicy = new UsageTrackingQueryCachingPolicy(); @@ -541,25 
+541,23 @@ public class IndexShard extends AbstractIndexShardComponent { /** Writes all indexing changes to disk and opens a new searcher reflecting all changes. This can throw {@link EngineClosedException}. */ public void refresh(String source) { verifyNotClosed(); - if (getEngine().refreshNeeded()) { - if (canIndex()) { - long bytes = getEngine().getIndexBufferRAMBytesUsed(); - writingBytes.addAndGet(bytes); - try { - logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); - long time = System.nanoTime(); - getEngine().refresh(source); - refreshMetric.inc(System.nanoTime() - time); - } finally { - logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); - writingBytes.addAndGet(-bytes); - } - } else { - logger.debug("refresh with source [{}]", source); + if (canIndex()) { + long bytes = getEngine().getIndexBufferRAMBytesUsed(); + writingBytes.addAndGet(bytes); + try { + logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); long time = System.nanoTime(); getEngine().refresh(source); refreshMetric.inc(System.nanoTime() - time); + } finally { + logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); + writingBytes.addAndGet(-bytes); } + } else { + logger.debug("refresh with source [{}]", source); + long time = System.nanoTime(); + getEngine().refresh(source); + refreshMetric.inc(System.nanoTime() - time); } } @@ -1208,7 +1206,7 @@ public class IndexShard extends AbstractIndexShardComponent { BytesStreamOutput os = new BytesStreamOutput(); PrintStream out = new PrintStream(os, false, StandardCharsets.UTF_8.name()); - if ("checksum".equalsIgnoreCase(checkIndexOnStartup)) { + if ("checksum".equals(checkIndexOnStartup)) { // physical verification only: verify all checksums for the latest commit IOException corrupt = null; MetadataSnapshot metadata = store.getMetadata(); @@ -1240,7 +1238,7 @@ 
public class IndexShard extends AbstractIndexShardComponent { return; } logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); - if ("fix".equalsIgnoreCase(checkIndexOnStartup)) { + if ("fix".equals(checkIndexOnStartup)) { if (logger.isDebugEnabled()) { logger.debug("fixing index, writing new segments file ..."); } @@ -1514,4 +1512,15 @@ public class IndexShard extends AbstractIndexShardComponent { return engineFactory; } + /** + * Returns true iff one or more changes to the engine are not visible to via the current searcher. + * Otherwise false. + * + * @throws EngineClosedException if the engine is already closed + * @throws AlreadyClosedException if the internal indexwriter in the engine is already closed + */ + public boolean isRefreshNeeded() { + return getEngine().refreshNeeded(); + } + } diff --git a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java index f82b832a152..e3600291318 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/ShadowIndexShard.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.search.stats.SearchSlowLog; +import org.elasticsearch.index.SearchSlowLog; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.TranslogStats; diff --git a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogOpsRecoveryIT.java b/core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java similarity index 51% rename from core/src/test/java/org/elasticsearch/recovery/SmallTranslogOpsRecoveryIT.java rename to 
core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java index e716736252c..30b07a06c34 100644 --- a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogOpsRecoveryIT.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/DFISimilarityProvider.java @@ -17,17 +17,35 @@ * under the License. */ -package org.elasticsearch.recovery; +package org.elasticsearch.index.similarity; +import org.apache.lucene.search.similarities.DFISimilarity; +import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.common.settings.Settings; /** - * + * {@link SimilarityProvider} for the {@link DFISimilarity}. + *

    + * Configuration options available: + *

      + *
    • discount_overlaps
    • + *
    + * @see DFISimilarity For more information about configuration */ -public class SmallTranslogOpsRecoveryIT extends SimpleRecoveryIT { +public class DFISimilarityProvider extends AbstractSimilarityProvider { + + private final DFISimilarity similarity; + + public DFISimilarityProvider(String name, Settings settings) { + super(name); + boolean discountOverlaps = settings.getAsBoolean("discount_overlaps", true); + + this.similarity = new DFISimilarity(); + this.similarity.setDiscountOverlaps(discountOverlaps); + } @Override - protected Settings recoverySettings() { - return Settings.settingsBuilder().put("index.shard.recovery.translog_ops", 1).build(); + public Similarity get() { + return similarity; } } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index f564b0e91d1..e950ebda1b3 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -52,6 +52,7 @@ public final class SimilarityService extends AbstractIndexComponent { buildIn.put("IB", IBSimilarityProvider::new); buildIn.put("LMDirichlet", LMDirichletSimilarityProvider::new); buildIn.put("LMJelinekMercer", LMJelinekMercerSimilarityProvider::new); + buildIn.put("DFI", DFISimilarityProvider::new); DEFAULTS = Collections.unmodifiableMap(defaults); BUILT_IN = Collections.unmodifiableMap(buildIn); } diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index fcc18f8b678..d5d6d5234be 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.Constants; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.metrics.CounterMetric; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexModule; @@ -50,7 +51,16 @@ import java.util.Set; public class FsDirectoryService extends DirectoryService implements StoreRateLimiting.Listener, StoreRateLimiting.Provider { protected final IndexStore indexStore; - + public static final Setting INDEX_LOCK_FACTOR_SETTING = new Setting<>("index.store.fs.fs_lock", "native", (s) -> { + switch (s) { + case "native": + return NativeFSLockFactory.INSTANCE; + case "simple": + return SimpleFSLockFactory.INSTANCE; + default: + throw new IllegalArgumentException("unrecognized [index.store.fs.fs_lock] \"" + s + "\": must be native or simple"); + } + }, false, Setting.Scope.INDEX); private final CounterMetric rateLimitingTimeInNanos = new CounterMetric(); private final ShardPath path; @@ -71,29 +81,11 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim return indexStore.rateLimiting(); } - public static LockFactory buildLockFactory(IndexSettings indexSettings) { - final Settings settings = indexSettings.getSettings(); - String fsLock = settings.get("index.store.fs.lock", settings.get("index.store.fs.fs_lock", "native")); - LockFactory lockFactory; - if (fsLock.equals("native")) { - lockFactory = NativeFSLockFactory.INSTANCE; - } else if (fsLock.equals("simple")) { - lockFactory = SimpleFSLockFactory.INSTANCE; - } else { - throw new IllegalArgumentException("unrecognized fs_lock \"" + fsLock + "\": must be native or simple"); - } - return lockFactory; - } - - protected final LockFactory buildLockFactory() throws IOException { - return buildLockFactory(indexSettings); - } - @Override public Directory newDirectory() throws IOException { final Path location = path.resolveIndex(); Files.createDirectories(location); - Directory 
wrapped = newFSDirectory(location, buildLockFactory()); + Directory wrapped = newFSDirectory(location, indexSettings.getValue(INDEX_LOCK_FACTOR_SETTING)); return new RateLimitedFSDirectory(wrapped, this, this) ; } @@ -112,7 +104,7 @@ public class FsDirectoryService extends DirectoryService implements StoreRateLim protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { - final String storeType = indexSettings.getSettings().get(IndexModule.STORE_TYPE, IndexModule.Type.DEFAULT.getSettingsKey()); + final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.DEFAULT.getSettingsKey()); if (IndexModule.Type.FS.match(storeType) || IndexModule.Type.DEFAULT.match(storeType)) { final FSDirectory open = FSDirectory.open(location, lockFactory); // use lucene defaults if (open instanceof MMapDirectory && Constants.WINDOWS == false) { diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java index ea6f59b0520..e98ad7cc6eb 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStore.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStore.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.store; import org.apache.lucene.store.StoreRateLimiting; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; @@ -29,32 +29,19 @@ import org.elasticsearch.index.shard.ShardPath; * */ public class IndexStore extends AbstractIndexComponent { - - public static final String INDEX_STORE_THROTTLE_TYPE = "index.store.throttle.type"; - public static final String INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC = "index.store.throttle.max_bytes_per_sec"; + public static final Setting 
INDEX_STORE_THROTTLE_TYPE_SETTING = new Setting<>("index.store.throttle.type", "none", IndexRateLimitingType::fromString, true, Setting.Scope.INDEX) ; + public static final Setting INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING = Setting.byteSizeSetting("index.store.throttle.max_bytes_per_sec", new ByteSizeValue(0), true, Setting.Scope.INDEX); protected final IndexStoreConfig indexStoreConfig; - private volatile String rateLimitingType; - private volatile ByteSizeValue rateLimitingThrottle; - private volatile boolean nodeRateLimiting; - private final StoreRateLimiting rateLimiting = new StoreRateLimiting(); + private volatile IndexRateLimitingType type; public IndexStore(IndexSettings indexSettings, IndexStoreConfig indexStoreConfig) { super(indexSettings); this.indexStoreConfig = indexStoreConfig; - - this.rateLimitingType = indexSettings.getSettings().get(INDEX_STORE_THROTTLE_TYPE, "none"); - if (rateLimitingType.equalsIgnoreCase("node")) { - nodeRateLimiting = true; - } else { - nodeRateLimiting = false; - rateLimiting.setType(rateLimitingType); - } - this.rateLimitingThrottle = indexSettings.getSettings().getAsBytesSize(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, new ByteSizeValue(0)); - rateLimiting.setMaxRate(rateLimitingThrottle); - - logger.debug("using index.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimitingType, rateLimitingThrottle); + setType(indexSettings.getValue(INDEX_STORE_THROTTLE_TYPE_SETTING)); + rateLimiting.setMaxRate(indexSettings.getValue(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING)); + logger.debug("using index.store.throttle.type [{}], with index.store.throttle.max_bytes_per_sec [{}]", rateLimiting.getType(), rateLimiting.getRateLimiter()); } /** @@ -62,7 +49,7 @@ public class IndexStore extends AbstractIndexComponent { * the node level one (defaults to the node level one). */ public StoreRateLimiting rateLimiting() { - return nodeRateLimiting ? 
indexStoreConfig.getNodeRateLimiter() : this.rateLimiting; + return type.useStoreLimiter() ? indexStoreConfig.getNodeRateLimiter() : this.rateLimiting; } /** @@ -72,26 +59,44 @@ public class IndexStore extends AbstractIndexComponent { return new FsDirectoryService(indexSettings, this, path); } - public void onRefreshSettings(Settings settings) { - String rateLimitingType = settings.get(INDEX_STORE_THROTTLE_TYPE, IndexStore.this.rateLimitingType); - if (!rateLimitingType.equals(IndexStore.this.rateLimitingType)) { - logger.info("updating index.store.throttle.type from [{}] to [{}]", IndexStore.this.rateLimitingType, rateLimitingType); - if (rateLimitingType.equalsIgnoreCase("node")) { - IndexStore.this.rateLimitingType = rateLimitingType; - IndexStore.this.nodeRateLimiting = true; - } else { - StoreRateLimiting.Type.fromString(rateLimitingType); - IndexStore.this.rateLimitingType = rateLimitingType; - IndexStore.this.nodeRateLimiting = false; - IndexStore.this.rateLimiting.setType(rateLimitingType); - } - } - - ByteSizeValue rateLimitingThrottle = settings.getAsBytesSize(INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, IndexStore.this.rateLimitingThrottle); - if (!rateLimitingThrottle.equals(IndexStore.this.rateLimitingThrottle)) { - logger.info("updating index.store.throttle.max_bytes_per_sec from [{}] to [{}], note, type is [{}]", IndexStore.this.rateLimitingThrottle, rateLimitingThrottle, IndexStore.this.rateLimitingType); - IndexStore.this.rateLimitingThrottle = rateLimitingThrottle; - IndexStore.this.rateLimiting.setMaxRate(rateLimitingThrottle); + public void setType(IndexRateLimitingType type) { + this.type = type; + if (type.useStoreLimiter() == false) { + rateLimiting.setType(type.type); } } + + public void setMaxRate(ByteSizeValue rate) { + rateLimiting.setMaxRate(rate); + } + + /** + * On an index level we can configure all of {@link org.apache.lucene.store.StoreRateLimiting.Type} as well as + * node which will then use a global rate limiter that has it's own 
configuration. The global one is + * configured in {@link IndexStoreConfig} which is managed by the per-node {@link org.elasticsearch.indices.IndicesService} + */ + public static final class IndexRateLimitingType { + private final StoreRateLimiting.Type type; + + private IndexRateLimitingType(StoreRateLimiting.Type type) { + this.type = type; + } + + private boolean useStoreLimiter() { + return type == null; + } + + static IndexRateLimitingType fromString(String type) { + if ("node".equalsIgnoreCase(type)) { + return new IndexRateLimitingType(null); + } else { + try { + return new IndexRateLimitingType(StoreRateLimiting.Type.fromString(type)); + } catch (IllegalArgumentException ex) { + throw new IllegalArgumentException("rate limiting type [" + type + "] not valid, can be one of [all|merge|none|node]"); + } + } + } + } + } diff --git a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java index ed561876735..ab7075afa5b 100644 --- a/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java +++ b/core/src/main/java/org/elasticsearch/index/store/IndexStoreConfig.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; * indices.store.throttle.type or indices.store.throttle.max_bytes_per_sec are reflected immediately * on all referencing {@link IndexStore} instances */ -public class IndexStoreConfig{ +public class IndexStoreConfig { /** * Configures the node / cluster level throttle type. See {@link StoreRateLimiting.Type}. 
diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index 3b3074e2385..f6e48e718b9 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -61,6 +61,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.Callback; @@ -81,6 +82,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.NoSuchFileException; import java.nio.file.Path; +import java.sql.Time; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -126,7 +128,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref static final int VERSION_START = 0; static final int VERSION = VERSION_WRITE_THROWABLE; static final String CORRUPTED = "corrupted_"; - public static final String INDEX_STORE_STATS_REFRESH_INTERVAL = "index.store.stats_refresh_interval"; + public static final Setting INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING = Setting.timeSetting("index.store.stats_refresh_interval", TimeValue.timeValueSeconds(10), false, Setting.Scope.INDEX); private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -154,7 +156,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref this.directory = new StoreDirectory(directoryService.newDirectory(), Loggers.getLogger("index.store.deletes", settings, shardId)); this.shardLock = shardLock; this.onClose = onClose; - final TimeValue refreshInterval = 
settings.getAsTime(INDEX_STORE_STATS_REFRESH_INTERVAL, TimeValue.timeValueSeconds(10)); + final TimeValue refreshInterval = indexSettings.getValue(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING); this.statsCache = new StoreStatsCache(refreshInterval, directory, directoryService); logger.debug("store stats are refreshed with refresh_interval [{}]", refreshInterval); diff --git a/core/src/main/java/org/elasticsearch/index/store/StoreModule.java b/core/src/main/java/org/elasticsearch/index/store/StoreModule.java deleted file mode 100644 index fccd2de2e43..00000000000 --- a/core/src/main/java/org/elasticsearch/index/store/StoreModule.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.store; - -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.env.ShardLock; -import org.elasticsearch.index.shard.ShardPath; - -/** - * - */ -public class StoreModule extends AbstractModule { - private final ShardLock lock; - private final Store.OnClose closeCallback; - private final ShardPath path; - private final Class shardDirectory; - - - public StoreModule(Class shardDirectory, ShardLock lock, Store.OnClose closeCallback, ShardPath path) { - this.shardDirectory = shardDirectory; - this.lock = lock; - this.closeCallback = closeCallback; - this.path = path; - } - - @Override - protected void configure() { - bind(DirectoryService.class).to(shardDirectory).asEagerSingleton(); - bind(Store.class).asEagerSingleton(); - bind(ShardLock.class).toInstance(lock); - bind(Store.OnClose.class).toInstance(closeCallback); - bind(ShardPath.class).toInstance(path); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java new file mode 100644 index 00000000000..c98ea69f87f --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java @@ -0,0 +1,136 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.translog; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.ByteBufferStreamInput; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Path; + +/** + * A base class for all classes that allows reading ops from translog files + */ +public abstract class BaseTranslogReader implements Comparable { + + protected final long generation; + protected final FileChannel channel; + protected final Path path; + protected final long firstOperationOffset; + + public BaseTranslogReader(long generation, FileChannel channel, Path path, long firstOperationOffset) { + assert Translog.parseIdFromFileName(path) == generation : "generation missmatch. Path: " + Translog.parseIdFromFileName(path) + " but generation: " + generation; + + this.generation = generation; + this.path = path; + this.channel = channel; + this.firstOperationOffset = firstOperationOffset; + } + + public long getGeneration() { + return this.generation; + } + + public abstract long sizeInBytes(); + + abstract public int totalOperations(); + + public final long getFirstOperationOffset() { + return firstOperationOffset; + } + + public Translog.Operation read(Translog.Location location) throws IOException { + assert location.generation == generation : "read location's translog generation [" + location.generation + "] is not [" + generation + "]"; + ByteBuffer buffer = ByteBuffer.allocate(location.size); + try (BufferedChecksumStreamInput checksumStreamInput = checksummedStream(buffer, location.translogLocation, location.size, null)) { + return read(checksumStreamInput); + } + } + + /** read the size of the op (i.e., number of bytes, including the op size) written at the given position */ + protected final int readSize(ByteBuffer reusableBuffer, long position) { + 
// read op size from disk + assert reusableBuffer.capacity() >= 4 : "reusable buffer must have capacity >=4 when reading opSize. got [" + reusableBuffer.capacity() + "]"; + try { + reusableBuffer.clear(); + reusableBuffer.limit(4); + readBytes(reusableBuffer, position); + reusableBuffer.flip(); + // Add an extra 4 to account for the operation size integer itself + final int size = reusableBuffer.getInt() + 4; + final long maxSize = sizeInBytes() - position; + if (size < 0 || size > maxSize) { + throw new TranslogCorruptedException("operation size is corrupted must be [0.." + maxSize + "] but was: " + size); + } + + return size; + } catch (IOException e) { + throw new ElasticsearchException("unexpected exception reading from translog snapshot of " + this.path, e); + } + } + + public Translog.Snapshot newSnapshot() { + return new TranslogSnapshot(generation, channel, path, firstOperationOffset, sizeInBytes(), totalOperations()); + } + + /** + * reads an operation at the given position and returns it. The buffer length is equal to the number + * of bytes reads. + */ + protected final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException { + final ByteBuffer buffer; + if (reusableBuffer.capacity() >= opSize) { + buffer = reusableBuffer; + } else { + buffer = ByteBuffer.allocate(opSize); + } + buffer.clear(); + buffer.limit(opSize); + readBytes(buffer, position); + buffer.flip(); + return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse); + } + + protected Translog.Operation read(BufferedChecksumStreamInput inStream) throws IOException { + return Translog.readOperation(inStream); + } + + /** + * reads bytes at position into the given buffer, filling it. 
+ */ + abstract protected void readBytes(ByteBuffer buffer, long position) throws IOException; + + @Override + public String toString() { + return "translog [" + generation + "][" + path + "]"; + } + + @Override + public int compareTo(BaseTranslogReader o) { + return Long.compare(getGeneration(), o.getGeneration()); + } + + + public Path path() { + return path; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java b/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java deleted file mode 100644 index b3f60a4c89f..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/ChannelReference.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.translog; - -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.util.Callback; -import org.elasticsearch.common.util.concurrent.AbstractRefCounted; - -import java.io.IOException; -import java.nio.channels.FileChannel; -import java.nio.file.Path; - -final class ChannelReference extends AbstractRefCounted { - private final Path file; - private final FileChannel channel; - protected final long generation; - private final Callback onClose; - - ChannelReference(Path file, long generation, FileChannel channel, Callback onClose) throws IOException { - super(file.toString()); - this.generation = generation; - this.file = file; - this.channel = channel; - this.onClose = onClose; - } - - public long getGeneration() { - return generation; - } - - public Path getPath() { - return this.file; - } - - public FileChannel getChannel() { - return this.channel; - } - - @Override - public String toString() { - return "channel: file [" + file + "], ref count [" + refCount() + "]"; - } - - @Override - protected void closeInternal() { - try { - IOUtils.closeWhileHandlingException(channel); - } finally { - if (onClose != null) { - onClose.handle(this); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java deleted file mode 100644 index 463c5998f1d..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReader.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.translog; - -import java.io.IOException; - -/** - * Version 0 of the translog format, there is no header in this file - */ -@Deprecated -public final class LegacyTranslogReader extends LegacyTranslogReaderBase { - - /** - * Create a snapshot of translog file channel. The length parameter should be consistent with totalOperations and point - * at the end of the last operation in this snapshot. - */ - LegacyTranslogReader(long generation, ChannelReference channelReference, long fileLength) { - super(generation, channelReference, 0, fileLength); - } - - @Override - protected Translog.Operation read(BufferedChecksumStreamInput in) throws IOException { - // read the opsize before an operation. 
- // Note that this was written & read out side of the stream when this class was used, but it makes things more consistent - // to read this here - in.readInt(); - Translog.Operation.Type type = Translog.Operation.Type.fromId(in.readByte()); - Translog.Operation operation = Translog.newOperationFromType(type); - operation.readFrom(in); - return operation; - } - - - - @Override - protected ImmutableTranslogReader newReader(long generation, ChannelReference channelReference, long firstOperationOffset, long length, int totalOperations) { - assert totalOperations == -1 : "expected unknown but was: " + totalOperations; - assert firstOperationOffset == 0; - return new LegacyTranslogReader(generation, channelReference, length); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReaderBase.java b/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReaderBase.java deleted file mode 100644 index d9e9e17f792..00000000000 --- a/core/src/main/java/org/elasticsearch/index/translog/LegacyTranslogReaderBase.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.translog; - -import java.io.IOException; -import java.nio.ByteBuffer; - -/** - * Version 1 of the translog format, there is checkpoint and therefore no notion of op count - */ -@Deprecated -class LegacyTranslogReaderBase extends ImmutableTranslogReader { - - /** - * Create a snapshot of translog file channel. The length parameter should be consistent with totalOperations and point - * at the end of the last operation in this snapshot. - * - */ - LegacyTranslogReaderBase(long generation, ChannelReference channelReference, long firstOperationOffset, long fileLength) { - super(generation, channelReference, firstOperationOffset, fileLength, TranslogReader.UNKNOWN_OP_COUNT); - } - - - @Override - protected Translog.Snapshot newReaderSnapshot(final int totalOperations, ByteBuffer reusableBuffer) { - assert totalOperations == -1 : "legacy we had no idea how many ops: " + totalOperations; - return new ReaderSnapshot(totalOperations, reusableBuffer) { - @Override - public Translog.Operation next() throws IOException { - if (position >= sizeInBytes()) { // this is the legacy case.... 
- return null; - } - try { - return readOperation(); - } catch (TruncatedTranslogException ex) { - return null; // legacy case - } - } - }; - } - - @Override - protected ImmutableTranslogReader newReader(long generation, ChannelReference channelReference, long firstOperationOffset, long length, int totalOperations) { - assert totalOperations == -1 : "expected unknown but was: " + totalOperations; - return new LegacyTranslogReaderBase(generation, channelReference, firstOperationOffset, length); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java b/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java index b76214dc2e7..7b1a05e1ac1 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java +++ b/core/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java @@ -19,12 +19,8 @@ package org.elasticsearch.index.translog; -import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.lease.Releasables; - import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Arrays; /** * A snapshot composed out of multiple snapshots @@ -32,8 +28,7 @@ import java.util.concurrent.atomic.AtomicBoolean; final class MultiSnapshot implements Translog.Snapshot { private final Translog.Snapshot[] translogs; - private AtomicBoolean closed = new AtomicBoolean(false); - private final int estimatedTotalOperations; + private final int totalOperations; private int index; /** @@ -41,30 +36,18 @@ final class MultiSnapshot implements Translog.Snapshot { */ MultiSnapshot(Translog.Snapshot[] translogs) { this.translogs = translogs; - int ops = 0; - for (Translog.Snapshot translog : translogs) { - - final int tops = translog.estimatedTotalOperations(); - if (tops == TranslogReader.UNKNOWN_OP_COUNT) { - ops = TranslogReader.UNKNOWN_OP_COUNT; - break; - } - assert tops >= 0 : "tops must be positive but 
was: " + tops; - ops += tops; - } - estimatedTotalOperations = ops; + totalOperations = Arrays.stream(translogs).mapToInt(Translog.Snapshot::totalOperations).sum(); index = 0; } @Override - public int estimatedTotalOperations() { - return estimatedTotalOperations; + public int totalOperations() { + return totalOperations; } @Override public Translog.Operation next() throws IOException { - ensureOpen(); for (; index < translogs.length; index++) { final Translog.Snapshot current = translogs[index]; Translog.Operation op = current.next(); @@ -74,17 +57,4 @@ final class MultiSnapshot implements Translog.Snapshot { } return null; } - - protected void ensureOpen() { - if (closed.get()) { - throw new AlreadyClosedException("snapshot already closed"); - } - } - - @Override - public void close() throws ElasticsearchException { - if (closed.compareAndSet(false, true)) { - Releasables.close(translogs); - } - } } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 0001fda0752..b2e81de044b 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -34,12 +34,9 @@ import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.ReleasableLock; 
@@ -53,7 +50,6 @@ import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; -import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; @@ -69,6 +65,8 @@ import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * A Translog is a per index shard component that records all non-committed index operations in a durable manner. @@ -112,29 +110,25 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC static final Pattern PARSE_STRICT_ID_PATTERN = Pattern.compile("^" + TRANSLOG_FILE_PREFIX + "(\\d+)(\\.tlog)$"); - private final List recoveredTranslogs; + // the list of translog readers is guaranteed to be in order of translog generation + private final List readers = new ArrayList<>(); private volatile ScheduledFuture syncScheduler; // this is a concurrent set and is not protected by any of the locks. The main reason - // is that is being accessed by two separate classes (additions & reading are done by FsTranslog, remove by FsView when closed) + // is that is being accessed by two separate classes (additions & reading are done by Translog, remove by View when closed) private final Set outstandingViews = ConcurrentCollections.newConcurrentSet(); private BigArrays bigArrays; protected final ReleasableLock readLock; protected final ReleasableLock writeLock; private final Path location; private TranslogWriter current; - private volatile ImmutableTranslogReader currentCommittingTranslog; - private volatile long lastCommittedTranslogFileGeneration = -1; // -1 is safe as it will not cause an translog deletion. + + private final static long NOT_SET_GENERATION = -1; // -1 is safe as it will not cause a translog deletion. 
+ + private volatile long currentCommittingGeneration = NOT_SET_GENERATION; + private volatile long lastCommittedTranslogFileGeneration = NOT_SET_GENERATION; private final AtomicBoolean closed = new AtomicBoolean(); private final TranslogConfig config; private final String translogUUID; - private Callback onViewClose = new Callback() { - @Override - public void handle(View view) { - logger.trace("closing view starting at translog [{}]", view.minTranslogGeneration()); - boolean removed = outstandingViews.remove(view); - assert removed : "View was never set but was supposed to be removed"; - } - }; /** @@ -176,11 +170,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // if not we don't even try to clean it up and wait until we fail creating it assert Files.exists(nextTranslogFile) == false || Files.size(nextTranslogFile) <= TranslogWriter.getHeaderLength(translogUUID) : "unexpected translog file: [" + nextTranslogFile + "]"; if (Files.exists(currentCheckpointFile) // current checkpoint is already copied - && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning + && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning logger.warn("deleted previously created, but not yet committed, next generation [{}]. 
This can happen due to a tragic exception when creating a new generation", nextTranslogFile.getFileName()); } - this.recoveredTranslogs = recoverFromFiles(translogGeneration, checkpoint); - if (recoveredTranslogs.isEmpty()) { + this.readers.addAll(recoverFromFiles(translogGeneration, checkpoint)); + if (readers.isEmpty()) { throw new IllegalStateException("at least one reader must be recovered"); } boolean success = false; @@ -193,11 +187,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // for instance if we have a lot of tlog and we can't create the writer we keep on holding // on to all the uncommitted tlog files if we don't close if (success == false) { - IOUtils.closeWhileHandlingException(recoveredTranslogs); + IOUtils.closeWhileHandlingException(readers); } } } else { - this.recoveredTranslogs = Collections.emptyList(); IOUtils.rm(location); logger.debug("wipe translog location - creating new translog"); Files.createDirectories(location); @@ -205,21 +198,22 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC Checkpoint checkpoint = new Checkpoint(0, 0, generation); Checkpoint.write(location.resolve(CHECKPOINT_FILE_NAME), checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); current = createWriter(generation); - this.lastCommittedTranslogFileGeneration = -1; // playing safe + this.lastCommittedTranslogFileGeneration = NOT_SET_GENERATION; } // now that we know which files are there, create a new current one. } catch (Throwable t) { // close the opened translog files if we fail to create a new translog... 
- IOUtils.closeWhileHandlingException(currentCommittingTranslog, current); + IOUtils.closeWhileHandlingException(current); + IOUtils.closeWhileHandlingException(readers); throw t; } } /** recover all translog files found on disk */ - private final ArrayList recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException { + private final ArrayList recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException { boolean success = false; - ArrayList foundTranslogs = new ArrayList<>(); + ArrayList foundTranslogs = new ArrayList<>(); final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work boolean tempFileRenamed = false; try (ReleasableLock lock = writeLock.acquire()) { @@ -230,7 +224,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (Files.exists(committedTranslogFile) == false) { throw new IllegalStateException("translog file doesn't exist with generation: " + i + " lastCommitted: " + lastCommittedTranslogFileGeneration + " checkpoint: " + checkpoint.generation + " - translog ids must be consecutive"); } - final ImmutableTranslogReader reader = openReader(committedTranslogFile, Checkpoint.read(location.resolve(getCommitCheckpointFileName(i)))); + final TranslogReader reader = openReader(committedTranslogFile, Checkpoint.read(location.resolve(getCommitCheckpointFileName(i)))); foundTranslogs.add(reader); logger.debug("recovered local translog from checkpoint {}", checkpoint); } @@ -267,17 +261,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return foundTranslogs; } - ImmutableTranslogReader openReader(Path path, Checkpoint checkpoint) throws IOException { - final long generation; - try { - generation = parseIdFromFileName(path); - } catch (IllegalArgumentException ex) { - throw 
new TranslogException(shardId, "failed to parse generation from file name matching pattern " + path, ex); - } + TranslogReader openReader(Path path, Checkpoint checkpoint) throws IOException { FileChannel channel = FileChannel.open(path, StandardOpenOption.READ); try { - final ChannelReference raf = new ChannelReference(path, generation, channel, new OnCloseRunnable()); - ImmutableTranslogReader reader = ImmutableTranslogReader.open(raf, checkpoint, translogUUID); + assert Translog.parseIdFromFileName(path) == checkpoint.generation : "expected generation: " + Translog.parseIdFromFileName(path) + " but got: " + checkpoint.generation; + TranslogReader reader = TranslogReader.open(channel, path, checkpoint, translogUUID); channel = null; return reader; } finally { @@ -315,12 +303,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC try { current.sync(); } finally { - try { - IOUtils.close(current, currentCommittingTranslog); - } finally { - IOUtils.close(recoveredTranslogs); - recoveredTranslogs.clear(); - } + closeFilesIfNoPendingViews(); } } finally { FutureUtils.cancel(syncScheduler); @@ -349,41 +332,49 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC /** * Returns the number of operations in the transaction files that aren't committed to lucene.. - * Note: may return -1 if unknown */ public int totalOperations() { - int ops = 0; - try (ReleasableLock lock = readLock.acquire()) { - ops += current.totalOperations(); - if (currentCommittingTranslog != null) { - int tops = currentCommittingTranslog.totalOperations(); - assert tops != TranslogReader.UNKNOWN_OP_COUNT; - assert tops >= 0; - ops += tops; - } - } - return ops; + return totalOperations(lastCommittedTranslogFileGeneration); } /** * Returns the size in bytes of the translog files that aren't committed to lucene. 
*/ public long sizeInBytes() { - long size = 0; - try (ReleasableLock lock = readLock.acquire()) { - size += current.sizeInBytes(); - if (currentCommittingTranslog != null) { - size += currentCommittingTranslog.sizeInBytes(); - } + return sizeInBytes(lastCommittedTranslogFileGeneration); + } + + /** + * Returns the number of operations in the transaction files that aren't committed to lucene.. + */ + private int totalOperations(long minGeneration) { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + return Stream.concat(readers.stream(), Stream.of(current)) + .filter(r -> r.getGeneration() >= minGeneration) + .mapToInt(BaseTranslogReader::totalOperations) + .sum(); + } + } + + /** + * Returns the size in bytes of the translog files that aren't committed to lucene. + */ + private long sizeInBytes(long minGeneration) { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + return Stream.concat(readers.stream(), Stream.of(current)) + .filter(r -> r.getGeneration() >= minGeneration) + .mapToLong(BaseTranslogReader::sizeInBytes) + .sum(); } - return size; } TranslogWriter createWriter(long fileGeneration) throws IOException { TranslogWriter newFile; try { - newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), new OnCloseRunnable(), getChannelFactory(), config.getBufferSize()); + newFile = TranslogWriter.create(shardId, translogUUID, fileGeneration, location.resolve(getFilename(fileGeneration)), getChannelFactory(), config.getBufferSize()); } catch (IOException e) { throw new TranslogException(shardId, "failed to create new translog file", e); } @@ -398,12 +389,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC */ public Translog.Operation read(Location location) { try (ReleasableLock lock = readLock.acquire()) { - final TranslogReader reader; + final BaseTranslogReader reader; final long currentGeneration = current.getGeneration(); 
if (currentGeneration == location.generation) { reader = current; - } else if (currentCommittingTranslog != null && currentCommittingTranslog.getGeneration() == location.generation) { - reader = currentCommittingTranslog; + } else if (readers.isEmpty() == false && readers.get(readers.size() - 1).getGeneration() == location.generation) { + reader = readers.get(readers.size() - 1); } else if (currentGeneration < location.generation) { throw new IllegalStateException("location generation [" + location.generation + "] is greater than the current generation [" + currentGeneration + "]"); } else { @@ -467,33 +458,16 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * Snapshots are fixed in time and will not be updated with future operations. */ public Snapshot newSnapshot() { - ensureOpen(); - try (ReleasableLock lock = readLock.acquire()) { - ArrayList toOpen = new ArrayList<>(); - toOpen.addAll(recoveredTranslogs); - if (currentCommittingTranslog != null) { - toOpen.add(currentCommittingTranslog); - } - toOpen.add(current); - return createSnapshot(toOpen.toArray(new TranslogReader[toOpen.size()])); - } + return createSnapshot(Long.MIN_VALUE); } - private static Snapshot createSnapshot(TranslogReader... 
translogs) { - Snapshot[] snapshots = new Snapshot[translogs.length]; - boolean success = false; - try { - for (int i = 0; i < translogs.length; i++) { - snapshots[i] = translogs[i].newSnapshot(); - } - - Snapshot snapshot = new MultiSnapshot(snapshots); - success = true; - return snapshot; - } finally { - if (success == false) { - Releasables.close(snapshots); - } + private Snapshot createSnapshot(long minGeneration) { + try (ReleasableLock ignored = readLock.acquire()) { + ensureOpen(); + Snapshot[] snapshots = Stream.concat(readers.stream(), Stream.of(current)) + .filter(reader -> reader.getGeneration() >= minGeneration) + .map(BaseTranslogReader::newSnapshot).toArray(Snapshot[]::new); + return new MultiSnapshot(snapshots); } } @@ -502,25 +476,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * while receiving future ones as well */ public Translog.View newView() { - // we need to acquire the read lock to make sure no new translog is created - // and will be missed by the view we're making try (ReleasableLock lock = readLock.acquire()) { - ArrayList translogs = new ArrayList<>(); - try { - if (currentCommittingTranslog != null) { - translogs.add(currentCommittingTranslog.clone()); - } - translogs.add(current.newReaderFromWriter()); - View view = new View(translogs, onViewClose); - // this is safe as we know that no new translog is being made at the moment - // (we hold a read lock) and the view will be notified of any future one - outstandingViews.add(view); - translogs.clear(); - return view; - } finally { - // close if anything happend and we didn't reach the clear - IOUtils.closeWhileHandlingException(translogs); - } + ensureOpen(); + View view = new View(lastCommittedTranslogFileGeneration); + outstandingViews.add(view); + return view; } } @@ -561,7 +521,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC */ public boolean ensureSynced(Location location) throws IOException { try 
(ReleasableLock lock = readLock.acquire()) { - if (location.generation == current.generation) { // if we have a new one it's already synced + if (location.generation == current.getGeneration()) { // if we have a new one it's already synced ensureOpen(); return current.syncUpTo(location.translogLocation + location.size); } @@ -604,151 +564,67 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return config; } - - private final class OnCloseRunnable implements Callback { - @Override - public void handle(ChannelReference channelReference) { - if (isReferencedGeneration(channelReference.getGeneration()) == false) { - Path translogPath = channelReference.getPath(); - assert channelReference.getPath().getParent().equals(location) : "translog files must be in the location folder: " + location + " but was: " + translogPath; - // if the given translogPath is not the current we can safely delete the file since all references are released - logger.trace("delete translog file - not referenced and not current anymore {}", translogPath); - IOUtils.deleteFilesIgnoringExceptions(translogPath); - IOUtils.deleteFilesIgnoringExceptions(translogPath.resolveSibling(getCommitCheckpointFileName(channelReference.getGeneration()))); - - } - try (DirectoryStream stream = Files.newDirectoryStream(location)) { - for (Path path : stream) { - Matcher matcher = PARSE_STRICT_ID_PATTERN.matcher(path.getFileName().toString()); - if (matcher.matches()) { - long generation = Long.parseLong(matcher.group(1)); - if (isReferencedGeneration(generation) == false) { - logger.trace("delete translog file - not referenced and not current anymore {}", path); - IOUtils.deleteFilesIgnoringExceptions(path); - IOUtils.deleteFilesIgnoringExceptions(path.resolveSibling(getCommitCheckpointFileName(channelReference.getGeneration()))); - } - } - } - } catch (IOException e) { - logger.warn("failed to delete unreferenced translog files", e); - } - } - } - /** * a view into the translog, 
capturing all translog file at the moment of creation * and updated with any future translog. */ - public static final class View implements Closeable { - public static final Translog.View EMPTY_VIEW = new View(Collections.emptyList(), null); + /** + * a view into the translog, capturing all translog file at the moment of creation + * and updated with any future translog. + */ + public class View implements Closeable { - boolean closed; - // last in this list is always FsTranslog.current - final List orderedTranslogs; - private final Callback onClose; + AtomicBoolean closed = new AtomicBoolean(); + final long minGeneration; - View(List orderedTranslogs, Callback onClose) { - // clone so we can safely mutate.. - this.orderedTranslogs = new ArrayList<>(orderedTranslogs); - this.onClose = onClose; - } - - /** - * Called by the parent class when ever the current translog changes - * - * @param oldCurrent a new read only reader for the old current (should replace the previous reference) - * @param newCurrent a reader into the new current. - */ - synchronized void onNewTranslog(TranslogReader oldCurrent, TranslogReader newCurrent) throws IOException { - // even though the close method removes this view from outstandingViews, there is no synchronisation in place - // between that operation and an ongoing addition of a new translog, already having an iterator. - // As such, this method can be called despite of the fact that we are closed. We need to check and ignore. 
- if (closed) { - // we have to close the new references created for as as we will not hold them - IOUtils.close(oldCurrent, newCurrent); - return; - } - orderedTranslogs.remove(orderedTranslogs.size() - 1).close(); - orderedTranslogs.add(oldCurrent); - orderedTranslogs.add(newCurrent); + View(long minGeneration) { + this.minGeneration = minGeneration; } /** this smallest translog generation in this view */ - public synchronized long minTranslogGeneration() { - ensureOpen(); - return orderedTranslogs.get(0).getGeneration(); + public long minTranslogGeneration() { + return minGeneration; } /** * The total number of operations in the view. */ - public synchronized int totalOperations() { - int ops = 0; - for (TranslogReader translog : orderedTranslogs) { - int tops = translog.totalOperations(); - if (tops == TranslogReader.UNKNOWN_OP_COUNT) { - return -1; - } - assert tops >= 0; - ops += tops; - } - return ops; + public int totalOperations() { + return Translog.this.totalOperations(minGeneration); } /** * Returns the size in bytes of the files behind the view. 
*/ - public synchronized long sizeInBytes() { - long size = 0; - for (TranslogReader translog : orderedTranslogs) { - size += translog.sizeInBytes(); - } - return size; + public long sizeInBytes() { + return Translog.this.sizeInBytes(minGeneration); } /** create a snapshot from this view */ - public synchronized Snapshot snapshot() { + public Snapshot snapshot() { ensureOpen(); - return createSnapshot(orderedTranslogs.toArray(new TranslogReader[orderedTranslogs.size()])); + return Translog.this.createSnapshot(minGeneration); } - void ensureOpen() { - if (closed) { - throw new ElasticsearchException("View is already closed"); + if (closed.get()) { + throw new AlreadyClosedException("View is already closed"); } } @Override - public void close() { - final List toClose = new ArrayList<>(); - try { - synchronized (this) { - if (closed == false) { - try { - if (onClose != null) { - onClose.handle(this); - } - } finally { - closed = true; - toClose.addAll(orderedTranslogs); - orderedTranslogs.clear(); - } - } - } - } finally { - try { - // Close out of lock to prevent deadlocks between channel close which checks for - // references in InternalChannelReference.closeInternal (waiting on a read lock) - // and other FsTranslog#newTranslog calling FsView.onNewTranslog (while having a write lock) - IOUtils.close(toClose); - } catch (Exception e) { - throw new ElasticsearchException("failed to close view", e); - } + public void close() throws IOException { + if (closed.getAndSet(true) == false) { + logger.trace("closing view starting at translog [{}]", minTranslogGeneration()); + boolean removed = outstandingViews.remove(this); + assert removed : "View was never set but was supposed to be removed"; + trimUnreferencedReaders(); + closeFilesIfNoPendingViews(); } } } + public static class Location implements Accountable, Comparable { public final long generation; @@ -817,12 +693,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC /** * A snapshot 
of the transaction log, allows to iterate over all the transaction log operations. */ - public interface Snapshot extends Releasable { + public interface Snapshot { /** * The total number of operations in the translog. */ - int estimatedTotalOperations(); + int totalOperations(); /** * Returns the next operation in the snapshot or null if we reached the end. @@ -1320,13 +1196,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public void prepareCommit() throws IOException { try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); - if (currentCommittingTranslog != null) { - throw new IllegalStateException("already committing a translog with generation: " + currentCommittingTranslog.getGeneration()); + if (currentCommittingGeneration != NOT_SET_GENERATION) { + throw new IllegalStateException("already committing a translog with generation: " + currentCommittingGeneration); } - final TranslogWriter oldCurrent = current; - oldCurrent.ensureOpen(); - oldCurrent.sync(); - currentCommittingTranslog = current.immutableReader(); + currentCommittingGeneration = current.getGeneration(); + TranslogReader currentCommittingTranslog = current.closeIntoReader(); + readers.add(currentCommittingTranslog); Path checkpoint = location.resolve(CHECKPOINT_FILE_NAME); assert Checkpoint.read(checkpoint).generation == currentCommittingTranslog.getGeneration(); Path commitCheckpoint = location.resolve(getCommitCheckpointFileName(currentCommittingTranslog.getGeneration())); @@ -1335,14 +1210,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC IOUtils.fsync(commitCheckpoint.getParent(), true); // create a new translog file - this will sync it and update the checkpoint data; current = createWriter(current.getGeneration() + 1); - // notify all outstanding views of the new translog (no views are created now as - // we hold a write lock). 
- for (View view : outstandingViews) { - view.onNewTranslog(currentCommittingTranslog.clone(), current.newReaderFromWriter()); - } - IOUtils.close(oldCurrent); logger.trace("current translog set to [{}]", current.getGeneration()); - assert oldCurrent.syncNeeded() == false : "old translog oldCurrent must not need a sync"; } catch (Throwable t) { IOUtils.closeWhileHandlingException(this); // tragic event @@ -1352,24 +1220,53 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public void commit() throws IOException { - ImmutableTranslogReader toClose = null; try (ReleasableLock lock = writeLock.acquire()) { ensureOpen(); - if (currentCommittingTranslog == null) { + if (currentCommittingGeneration == NOT_SET_GENERATION) { prepareCommit(); } + assert currentCommittingGeneration != NOT_SET_GENERATION; + assert readers.stream().filter(r -> r.getGeneration() == currentCommittingGeneration).findFirst().isPresent() + : "reader list doesn't contain committing generation [" + currentCommittingGeneration + "]"; lastCommittedTranslogFileGeneration = current.getGeneration(); // this is important - otherwise old files will not be cleaned up - if (recoveredTranslogs.isEmpty() == false) { - IOUtils.close(recoveredTranslogs); - recoveredTranslogs.clear(); - } - toClose = this.currentCommittingTranslog; - this.currentCommittingTranslog = null; - } finally { - IOUtils.close(toClose); + currentCommittingGeneration = NOT_SET_GENERATION; + trimUnreferencedReaders(); } } + void trimUnreferencedReaders() { + try (ReleasableLock ignored = writeLock.acquire()) { + if (closed.get()) { + // we're shutdown potentially on some tragic event - don't delete anything + return; + } + long minReferencedGen = outstandingViews.stream().mapToLong(View::minTranslogGeneration).min().orElse(Long.MAX_VALUE); + minReferencedGen = Math.min(lastCommittedTranslogFileGeneration, minReferencedGen); + final long finalMinReferencedGen = minReferencedGen; + List unreferenced 
= readers.stream().filter(r -> r.getGeneration() < finalMinReferencedGen).collect(Collectors.toList()); + for (final TranslogReader unreferencedReader : unreferenced) { + Path translogPath = unreferencedReader.path(); + logger.trace("delete translog file - not referenced and not current anymore {}", translogPath); + IOUtils.closeWhileHandlingException(unreferencedReader); + IOUtils.deleteFilesIgnoringExceptions(translogPath, + translogPath.resolveSibling(getCommitCheckpointFileName(unreferencedReader.getGeneration()))); + } + readers.removeAll(unreferenced); + } + } + + void closeFilesIfNoPendingViews() throws IOException { + try (ReleasableLock ignored = writeLock.acquire()) { + if (closed.get() && outstandingViews.isEmpty()) { + logger.trace("closing files. translog is closed and there are no pending views"); + ArrayList toClose = new ArrayList<>(readers); + toClose.add(current); + IOUtils.close(toClose); + } + } + } + + @Override public void rollback() throws IOException { ensureOpen(); @@ -1435,9 +1332,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return TranslogWriter.ChannelFactory.DEFAULT; } - /** If this {@code Translog} was closed as a side-effect of a tragic exception, - * e.g. disk full while flushing a new segment, this returns the root cause exception. - * Otherwise (no tragic exception has occurred) it returns null. */ + /** + * If this {@code Translog} was closed as a side-effect of a tragic exception, + * e.g. disk full while flushing a new segment, this returns the root cause exception. + * Otherwise (no tragic exception has occurred) it returns null. 
+ */ public Throwable getTragicException() { return current.getTragicException(); } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index 71dff6ec36e..ecc3822361c 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -27,161 +27,46 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.InputStreamDataInput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; +import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import java.io.Closeable; +import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; -import java.nio.channels.Channels; import java.nio.channels.FileChannel; -import java.nio.file.Files; import java.nio.file.Path; import java.util.concurrent.atomic.AtomicBoolean; /** - * A base class for all classes that allows reading ops from translog files + * an immutable translog filereader */ -public abstract class TranslogReader implements Closeable, Comparable { - public static final int UNKNOWN_OP_COUNT = -1; +public class TranslogReader extends BaseTranslogReader implements Closeable { private static final byte LUCENE_CODEC_HEADER_BYTE = 0x3f; private static final byte UNVERSIONED_TRANSLOG_HEADER_BYTE = 0x00; - protected final long generation; - protected final ChannelReference channelReference; - protected final FileChannel channel; + + private final int totalOperations; + protected final long length; protected final AtomicBoolean closed = new AtomicBoolean(false); - protected final long firstOperationOffset; - public TranslogReader(long generation, ChannelReference 
channelReference, long firstOperationOffset) { - this.generation = generation; - this.channelReference = channelReference; - this.channel = channelReference.getChannel(); - this.firstOperationOffset = firstOperationOffset; - } - - public long getGeneration() { - return this.generation; - } - - public abstract long sizeInBytes(); - - abstract public int totalOperations(); - - public final long getFirstOperationOffset() { - return firstOperationOffset; - } - - public Translog.Operation read(Translog.Location location) throws IOException { - assert location.generation == generation : "read location's translog generation [" + location.generation + "] is not [" + generation + "]"; - ByteBuffer buffer = ByteBuffer.allocate(location.size); - try (BufferedChecksumStreamInput checksumStreamInput = checksummedStream(buffer, location.translogLocation, location.size, null)) { - return read(checksumStreamInput); - } - } - - /** read the size of the op (i.e., number of bytes, including the op size) written at the given position */ - private final int readSize(ByteBuffer reusableBuffer, long position) { - // read op size from disk - assert reusableBuffer.capacity() >= 4 : "reusable buffer must have capacity >=4 when reading opSize. got [" + reusableBuffer.capacity() + "]"; - try { - reusableBuffer.clear(); - reusableBuffer.limit(4); - readBytes(reusableBuffer, position); - reusableBuffer.flip(); - // Add an extra 4 to account for the operation size integer itself - final int size = reusableBuffer.getInt() + 4; - final long maxSize = sizeInBytes() - position; - if (size < 0 || size > maxSize) { - throw new TranslogCorruptedException("operation size is corrupted must be [0.." 
+ maxSize + "] but was: " + size); - } - - return size; - } catch (IOException e) { - throw new ElasticsearchException("unexpected exception reading from translog snapshot of " + this.channelReference.getPath(), e); - } - } - - public Translog.Snapshot newSnapshot() { - final ByteBuffer reusableBuffer = ByteBuffer.allocate(1024); - final int totalOperations = totalOperations(); - channelReference.incRef(); - return newReaderSnapshot(totalOperations, reusableBuffer); + /** + * Create a reader of translog file channel. The length parameter should be consistent with totalOperations and point + * at the end of the last operation in this snapshot. + */ + public TranslogReader(long generation, FileChannel channel, Path path, long firstOperationOffset, long length, int totalOperations) { + super(generation, channel, path, firstOperationOffset); + this.length = length; + this.totalOperations = totalOperations; } /** - * reads an operation at the given position and returns it. The buffer length is equal to the number - * of bytes reads. + * Given a file, opens an {@link TranslogReader}, taking of checking and validating the file header. */ - private final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException { - final ByteBuffer buffer; - if (reusableBuffer.capacity() >= opSize) { - buffer = reusableBuffer; - } else { - buffer = ByteBuffer.allocate(opSize); - } - buffer.clear(); - buffer.limit(opSize); - readBytes(buffer, position); - buffer.flip(); - return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse); - } - - protected Translog.Operation read(BufferedChecksumStreamInput inStream) throws IOException { - return Translog.readOperation(inStream); - } - - /** - * reads bytes at position into the given buffer, filling it. 
- */ - abstract protected void readBytes(ByteBuffer buffer, long position) throws IOException; - - @Override - public final void close() throws IOException { - if (closed.compareAndSet(false, true)) { - channelReference.decRef(); - } - } - - protected final boolean isClosed() { - return closed.get(); - } - - protected void ensureOpen() { - if (isClosed()) { - throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed"); - } - } - - @Override - public String toString() { - return "translog [" + generation + "][" + channelReference.getPath() + "]"; - } - - @Override - public int compareTo(TranslogReader o) { - return Long.compare(getGeneration(), o.getGeneration()); - } - - - /** - * Given a file, return a VersionedTranslogStream based on an - * optionally-existing header in the file. If the file does not exist, or - * has zero length, returns the latest version. If the header does not - * exist, assumes Version 0 of the translog file format. - */ - public static ImmutableTranslogReader open(ChannelReference channelReference, Checkpoint checkpoint, String translogUUID) throws IOException { - final FileChannel channel = channelReference.getChannel(); - final Path path = channelReference.getPath(); - assert channelReference.getGeneration() == checkpoint.generation : "expected generation: " + channelReference.getGeneration() + " but got: " + checkpoint.generation; + public static TranslogReader open(FileChannel channel, Path path, Checkpoint checkpoint, String translogUUID) throws IOException { try { - if (checkpoint.offset == 0 && checkpoint.numOps == TranslogReader.UNKNOWN_OP_COUNT) { // only old files can be empty - return new LegacyTranslogReader(channelReference.getGeneration(), channelReference, 0); - } - - InputStreamStreamInput headerStream = new InputStreamStreamInput(Channels.newInputStream(channel)); // don't close + InputStreamStreamInput headerStream = new 
InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel)); // don't close // Lucene's CodecUtil writes a magic number of 0x3FD76C17 with the // header, in binary this looks like: // @@ -208,20 +93,17 @@ public abstract class TranslogReader implements Closeable, Comparable TranslogReader.UNKNOWN_OP_COUNT: "expected at least 0 operatin but got: " + checkpoint.numOps; + assert checkpoint.numOps >= 0 : "expected at least 0 operatin but got: " + checkpoint.numOps; assert checkpoint.offset <= channel.size() : "checkpoint is inconsistent with channel length: " + channel.size() + " " + checkpoint; int len = headerStream.readInt(); if (len > channel.size()) { @@ -232,78 +114,61 @@ public abstract class TranslogReader implements Closeable, Comparable= length) { + throw new EOFException("read requested past EOF. pos [" + position + "] end: [" + length + "]"); } - - @Override - public final int estimatedTotalOperations() { - return totalOperations; + if (position < firstOperationOffset) { + throw new IOException("read requested before position of first ops. 
pos [" + position + "] first op on: [" + firstOperationOffset + "]"); } + Channels.readFromFileChannelWithEofException(channel, position, buffer); + } - @Override - public Translog.Operation next() throws IOException { - if (readOperations < totalOperations) { - assert readOperations < totalOperations : "readOpeartions must be less than totalOperations"; - return readOperation(); - } else { - return null; - } + public Checkpoint getInfo() { + return new Checkpoint(length, totalOperations, getGeneration()); + } + + @Override + public final void close() throws IOException { + if (closed.compareAndSet(false, true)) { + channel.close(); } + } - protected final Translog.Operation readOperation() throws IOException { - final int opSize = readSize(reusableBuffer, position); - reuse = checksummedStream(reusableBuffer, position, opSize, reuse); - Translog.Operation op = read(reuse); - position += opSize; - readOperations++; - return op; - } + protected final boolean isClosed() { + return closed.get(); + } - @Override - public void close() { - if (closed.compareAndSet(false, true)) { - channelReference.decRef(); - } + protected void ensureOpen() { + if (isClosed()) { + throw new AlreadyClosedException(toString() + " is already closed"); } } } diff --git a/core/src/main/java/org/elasticsearch/index/translog/ImmutableTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java similarity index 50% rename from core/src/main/java/org/elasticsearch/index/translog/ImmutableTranslogReader.java rename to core/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java index 1d6d3b45a63..10f381f8eba 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/ImmutableTranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. 
*/ - package org.elasticsearch.index.translog; import org.elasticsearch.common.io.Channels; @@ -24,68 +23,82 @@ import org.elasticsearch.common.io.Channels; import java.io.EOFException; import java.io.IOException; import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Path; -/** - * a translog reader which is fixed in length - */ -public class ImmutableTranslogReader extends TranslogReader { - +public class TranslogSnapshot extends BaseTranslogReader implements Translog.Snapshot { private final int totalOperations; protected final long length; + private final ByteBuffer reusableBuffer; + private long position; + private int readOperations; + private BufferedChecksumStreamInput reuse; + + /** * Create a snapshot of translog file channel. The length parameter should be consistent with totalOperations and point * at the end of the last operation in this snapshot. */ - public ImmutableTranslogReader(long generation, ChannelReference channelReference, long firstOperationOffset, long length, int totalOperations) { - super(generation, channelReference, firstOperationOffset); + public TranslogSnapshot(long generation, FileChannel channel, Path path, long firstOperationOffset, long length, int totalOperations) { + super(generation, channel, path, firstOperationOffset); this.length = length; this.totalOperations = totalOperations; + this.reusableBuffer = ByteBuffer.allocate(1024); + readOperations = 0; + position = firstOperationOffset; + reuse = null; } @Override - public final TranslogReader clone() { - if (channelReference.tryIncRef()) { - try { - ImmutableTranslogReader reader = newReader(generation, channelReference, firstOperationOffset, length, totalOperations); - channelReference.incRef(); // for the new object - return reader; - } finally { - channelReference.decRef(); - } + public final int totalOperations() { + return totalOperations; + } + + @Override + public Translog.Operation next() throws IOException { + if (readOperations < 
totalOperations) { + return readOperation(); } else { - throw new IllegalStateException("can't increment translog [" + generation + "] channel ref count"); + return null; } } - - protected ImmutableTranslogReader newReader(long generation, ChannelReference channelReference, long offset, long length, int totalOperations) { - return new ImmutableTranslogReader(generation, channelReference, offset, length, totalOperations); + protected final Translog.Operation readOperation() throws IOException { + final int opSize = readSize(reusableBuffer, position); + reuse = checksummedStream(reusableBuffer, position, opSize, reuse); + Translog.Operation op = read(reuse); + position += opSize; + readOperations++; + return op; } + public long sizeInBytes() { return length; } - public int totalOperations() { - return totalOperations; - } - /** * reads an operation at the given position into the given buffer. */ protected void readBytes(ByteBuffer buffer, long position) throws IOException { if (position >= length) { - throw new EOFException("read requested past EOF. pos [" + position + "] end: [" + length + "]"); + throw new EOFException("read requested past EOF. pos [" + position + "] end: [" + length + "], generation: [" + getGeneration() + "], path: [" + path + "]"); } - if (position < firstOperationOffset) { - throw new IOException("read requested before position of first ops. pos [" + position + "] first op on: [" + firstOperationOffset + "]"); + if (position < getFirstOperationOffset()) { + throw new IOException("read requested before position of first ops. 
pos [" + position + "] first op on: [" + getFirstOperationOffset() + "], generation: [" + getGeneration() + "], path: [" + path + "]"); } Channels.readFromFileChannelWithEofException(channel, position, buffer); } - public Checkpoint getInfo() { - return new Checkpoint(length, totalOperations, getGeneration()); + @Override + public String toString() { + return "TranslogSnapshot{" + + "readOperations=" + readOperations + + ", position=" + position + + ", totalOperations=" + totalOperations + + ", length=" + length + + ", reusableBuffer=" + reusableBuffer + + '}'; } - -} +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 517e4a5b30e..a1fc708ddaf 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -25,24 +25,23 @@ import org.apache.lucene.store.OutputStreamDataOutput; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.index.shard.ShardId; import java.io.BufferedOutputStream; +import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; -import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import java.util.concurrent.atomic.AtomicBoolean; -public class TranslogWriter extends TranslogReader { +public class TranslogWriter extends BaseTranslogReader implements Closeable { public static final String 
TRANSLOG_CODEC = "translog"; public static final int VERSION_CHECKSUMS = 1; @@ -61,11 +60,14 @@ public class TranslogWriter extends TranslogReader { /* the total offset of this file including the bytes written to the file as well as into the buffer */ private volatile long totalOffset; - public TranslogWriter(ShardId shardId, long generation, ChannelReference channelReference, ByteSizeValue bufferSize) throws IOException { - super(generation, channelReference, channelReference.getChannel().position()); + protected final AtomicBoolean closed = new AtomicBoolean(false); + + + public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException { + super(generation, channel, path, channel.position()); this.shardId = shardId; - this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channelReference.getChannel()), bufferSize.bytesAsInt()); - this.lastSyncedOffset = channelReference.getChannel().position(); + this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channel), bufferSize.bytesAsInt()); + this.lastSyncedOffset = channel.position(); totalOffset = lastSyncedOffset; } @@ -74,10 +76,10 @@ public class TranslogWriter extends TranslogReader { } private static int getHeaderLength(int uuidLength) { - return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT; + return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + RamUsageEstimator.NUM_BYTES_INT; } - public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, Callback onClose, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { + public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException { final BytesRef ref = new BytesRef(translogUUID); final 
int headerLength = getHeaderLength(ref.length); final FileChannel channel = channelFactory.open(file); @@ -90,7 +92,7 @@ public class TranslogWriter extends TranslogReader { out.writeBytes(ref.bytes, ref.offset, ref.length); channel.force(true); writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE); - final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, new ChannelReference(file, fileGeneration, channel, onClose), bufferSize); + final TranslogWriter writer = new TranslogWriter(shardId, fileGeneration, channel, file, bufferSize); return writer; } catch (Throwable throwable) { // if we fail to bake the file-generation into the checkpoint we stick with the file and once we recover and that @@ -99,9 +101,12 @@ public class TranslogWriter extends TranslogReader { throw throwable; } } - /** If this {@code TranslogWriter} was closed as a side-effect of a tragic exception, - * e.g. disk full while flushing a new segment, this returns the root cause exception. - * Otherwise (no tragic exception has occurred) it returns null. */ + + /** + * If this {@code TranslogWriter} was closed as a side-effect of a tragic exception, + * e.g. disk full while flushing a new segment, this returns the root cause exception. + * Otherwise (no tragic exception has occurred) it returns null. + */ public Throwable getTragicException() { return tragedy; } @@ -110,7 +115,9 @@ public class TranslogWriter extends TranslogReader { assert throwable != null : "throwable must not be null in a tragic event"; if (tragedy == null) { tragedy = throwable; - } else { + } else if (tragedy != throwable) { + // it should be safe to call closeWithTragicEvents on multiple layers without + // worrying about self suppression. tragedy.addSuppressed(throwable); } close(); @@ -134,29 +141,27 @@ public class TranslogWriter extends TranslogReader { } /** - * write all buffered ops to disk and fsync file + * write all buffered ops to disk and fsync file. 
+ * + * Note: any exception during the sync process will be interpreted as a tragic exception and the writer will be closed before + * raising the exception. */ public void sync() throws IOException { if (syncNeeded()) { synchronized (this) { - ensureOpen(); // this call gives a better exception that the incRef if we are closed by a tragic event - channelReference.incRef(); + ensureOpen(); + final long offsetToSync; + final int opsCounter; try { - final long offsetToSync; - final int opsCounter; outputStream.flush(); offsetToSync = totalOffset; opsCounter = operationCounter; - try { - checkpoint(offsetToSync, opsCounter, channelReference); - } catch (Throwable ex) { - closeWithTragicEvent(ex); - throw ex; - } - lastSyncedOffset = offsetToSync; - } finally { - channelReference.decRef(); + checkpoint(offsetToSync, opsCounter, generation, channel, path); + } catch (Throwable ex) { + closeWithTragicEvent(ex); + throw ex; } + lastSyncedOffset = offsetToSync; } } } @@ -177,76 +182,46 @@ public class TranslogWriter extends TranslogReader { } /** - * returns a new reader that follows the current writes (most importantly allows making - * repeated snapshots that includes new content) + * closes this writer and transfers it's underlying file channel to a new immutable reader */ - public TranslogReader newReaderFromWriter() { - ensureOpen(); - channelReference.incRef(); - boolean success = false; + public synchronized TranslogReader closeIntoReader() throws IOException { try { - final TranslogReader reader = new InnerReader(this.generation, firstOperationOffset, channelReference); - success = true; - return reader; - } finally { - if (!success) { - channelReference.decRef(); - } + sync(); // sync before we close.. 
+ } catch (IOException e) { + closeWithTragicEvent(e); + throw e; } - } - - /** - * returns a new immutable reader which only exposes the current written operation * - */ - public ImmutableTranslogReader immutableReader() throws TranslogException { - if (channelReference.tryIncRef()) { - synchronized (this) { - try { - ensureOpen(); - outputStream.flush(); - ImmutableTranslogReader reader = new ImmutableTranslogReader(this.generation, channelReference, firstOperationOffset, getWrittenOffset(), operationCounter); - channelReference.incRef(); // for new reader - return reader; - } catch (Exception e) { - throw new TranslogException(shardId, "exception while creating an immutable reader", e); - } finally { - channelReference.decRef(); + if (closed.compareAndSet(false, true)) { + boolean success = false; + try { + final TranslogReader reader = new TranslogReader(generation, channel, path, firstOperationOffset, getWrittenOffset(), operationCounter); + success = true; + return reader; + } finally { + if (success == false) { + // close the channel, as we are closed and failed to create a new reader + IOUtils.closeWhileHandlingException(channel); } } } else { - throw new TranslogException(shardId, "can't increment channel [" + channelReference + "] ref count"); + throw new AlreadyClosedException("translog [" + getGeneration() + "] is already closed (path [" + path + "]", tragedy); } } + @Override + public synchronized Translog.Snapshot newSnapshot() { + ensureOpen(); + try { + sync(); + } catch (IOException e) { + throw new TranslogException(shardId, "exception while syncing before creating a snapshot", e); + } + return super.newSnapshot(); + } + private long getWrittenOffset() throws IOException { - return channelReference.getChannel().position(); - } - - /** - * this class is used when one wants a reference to this file which exposes all recently written operation. 
- * as such it needs access to the internals of the current reader - */ - final class InnerReader extends TranslogReader { - - public InnerReader(long generation, long fistOperationOffset, ChannelReference channelReference) { - super(generation, channelReference, fistOperationOffset); - } - - @Override - public long sizeInBytes() { - return TranslogWriter.this.sizeInBytes(); - } - - @Override - public int totalOperations() { - return TranslogWriter.this.totalOperations(); - } - - @Override - protected void readBytes(ByteBuffer buffer, long position) throws IOException { - TranslogWriter.this.readBytes(buffer, position); - } + return channel.position(); } /** @@ -264,13 +239,13 @@ public class TranslogWriter extends TranslogReader { @Override protected void readBytes(ByteBuffer targetBuffer, long position) throws IOException { - if (position+targetBuffer.remaining() > getWrittenOffset()) { + if (position + targetBuffer.remaining() > getWrittenOffset()) { synchronized (this) { // we only flush here if it's really really needed - try to minimize the impact of the read operation // in some cases ie. a tragic event we might still be able to read the relevant value // which is not really important in production but some test can make most strict assumptions // if we don't fail in this call unless absolutely necessary. 
- if (position+targetBuffer.remaining() > getWrittenOffset()) { + if (position + targetBuffer.remaining() > getWrittenOffset()) { outputStream.flush(); } } @@ -280,9 +255,9 @@ public class TranslogWriter extends TranslogReader { Channels.readFromFileChannelWithEofException(channel, position, targetBuffer); } - private synchronized void checkpoint(long lastSyncPosition, int operationCounter, ChannelReference channelReference) throws IOException { - channelReference.getChannel().force(false); - writeCheckpoint(lastSyncPosition, operationCounter, channelReference.getPath().getParent(), channelReference.getGeneration(), StandardOpenOption.WRITE); + private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException { + translogFileChannel.force(false); + writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE); } private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... 
options) throws IOException { @@ -307,6 +282,17 @@ public class TranslogWriter extends TranslogReader { } } + @Override + public final void close() throws IOException { + if (closed.compareAndSet(false, true)) { + channel.close(); + } + } + + protected final boolean isClosed() { + return closed.get(); + } + private final class BufferedChannelOutputStream extends BufferedOutputStream { diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index faf7f73dc23..907a64f8175 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -22,9 +22,7 @@ package org.elasticsearch.indices; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.util.ExtensionPoint; import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -57,52 +55,6 @@ import org.elasticsearch.index.mapper.internal.VersionFieldMapper; import org.elasticsearch.index.mapper.ip.IpFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.percolator.PercolatorFieldMapper; -import org.elasticsearch.index.query.BoolQueryParser; -import org.elasticsearch.index.query.BoostingQueryParser; -import org.elasticsearch.index.query.CommonTermsQueryParser; -import org.elasticsearch.index.query.ConstantScoreQueryParser; -import org.elasticsearch.index.query.DisMaxQueryParser; -import org.elasticsearch.index.query.ExistsQueryParser; -import org.elasticsearch.index.query.FieldMaskingSpanQueryParser; -import 
org.elasticsearch.index.query.FuzzyQueryParser; -import org.elasticsearch.index.query.GeoBoundingBoxQueryParser; -import org.elasticsearch.index.query.GeoDistanceQueryParser; -import org.elasticsearch.index.query.GeoDistanceRangeQueryParser; -import org.elasticsearch.index.query.GeoPolygonQueryParser; -import org.elasticsearch.index.query.GeoShapeQueryParser; -import org.elasticsearch.index.query.GeohashCellQuery; -import org.elasticsearch.index.query.HasChildQueryParser; -import org.elasticsearch.index.query.HasParentQueryParser; -import org.elasticsearch.index.query.IdsQueryParser; -import org.elasticsearch.index.query.IndicesQueryParser; -import org.elasticsearch.index.query.MatchAllQueryParser; -import org.elasticsearch.index.query.MatchNoneQueryParser; -import org.elasticsearch.index.query.MatchQueryParser; -import org.elasticsearch.index.query.MoreLikeThisQueryParser; -import org.elasticsearch.index.query.MultiMatchQueryParser; -import org.elasticsearch.index.query.NestedQueryParser; -import org.elasticsearch.index.query.PrefixQueryParser; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.index.query.QueryStringQueryParser; -import org.elasticsearch.index.query.RangeQueryParser; -import org.elasticsearch.index.query.RegexpQueryParser; -import org.elasticsearch.index.query.ScriptQueryParser; -import org.elasticsearch.index.query.SimpleQueryStringParser; -import org.elasticsearch.index.query.SpanContainingQueryParser; -import org.elasticsearch.index.query.SpanFirstQueryParser; -import org.elasticsearch.index.query.SpanMultiTermQueryParser; -import org.elasticsearch.index.query.SpanNearQueryParser; -import org.elasticsearch.index.query.SpanNotQueryParser; -import org.elasticsearch.index.query.SpanOrQueryParser; -import org.elasticsearch.index.query.SpanTermQueryParser; -import org.elasticsearch.index.query.SpanWithinQueryParser; -import org.elasticsearch.index.query.TemplateQueryParser; -import 
org.elasticsearch.index.query.TermQueryParser; -import org.elasticsearch.index.query.TermsQueryParser; -import org.elasticsearch.index.query.TypeQueryParser; -import org.elasticsearch.index.query.WildcardQueryParser; -import org.elasticsearch.index.query.WrapperQueryParser; -import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.indices.cache.request.IndicesRequestCache; @@ -111,7 +63,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCacheListener; import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.recovery.RecoverySource; import org.elasticsearch.indices.recovery.RecoveryTarget; @@ -127,10 +78,6 @@ import java.util.Map; */ public class IndicesModule extends AbstractModule { - - private final ExtensionPoint.ClassSet queryParsers - = new ExtensionPoint.ClassSet<>("query_parser", QueryParser.class); - private final Map mapperParsers = new LinkedHashMap<>(); // Use a LinkedHashMap for metadataMappers because iteration order matters @@ -138,62 +85,10 @@ public class IndicesModule extends AbstractModule { = new LinkedHashMap<>(); public IndicesModule() { - registerBuiltinQueryParsers(); registerBuiltInMappers(); registerBuiltInMetadataMappers(); } - private void registerBuiltinQueryParsers() { - registerQueryParser(MatchQueryParser.class); - registerQueryParser(MultiMatchQueryParser.class); - registerQueryParser(NestedQueryParser.class); - registerQueryParser(HasChildQueryParser.class); - registerQueryParser(HasParentQueryParser.class); - registerQueryParser(DisMaxQueryParser.class); - 
registerQueryParser(IdsQueryParser.class); - registerQueryParser(MatchAllQueryParser.class); - registerQueryParser(QueryStringQueryParser.class); - registerQueryParser(BoostingQueryParser.class); - registerQueryParser(BoolQueryParser.class); - registerQueryParser(TermQueryParser.class); - registerQueryParser(TermsQueryParser.class); - registerQueryParser(FuzzyQueryParser.class); - registerQueryParser(RegexpQueryParser.class); - registerQueryParser(RangeQueryParser.class); - registerQueryParser(PrefixQueryParser.class); - registerQueryParser(WildcardQueryParser.class); - registerQueryParser(ConstantScoreQueryParser.class); - registerQueryParser(SpanTermQueryParser.class); - registerQueryParser(SpanNotQueryParser.class); - registerQueryParser(SpanWithinQueryParser.class); - registerQueryParser(SpanContainingQueryParser.class); - registerQueryParser(FieldMaskingSpanQueryParser.class); - registerQueryParser(SpanFirstQueryParser.class); - registerQueryParser(SpanNearQueryParser.class); - registerQueryParser(SpanOrQueryParser.class); - registerQueryParser(MoreLikeThisQueryParser.class); - registerQueryParser(WrapperQueryParser.class); - registerQueryParser(IndicesQueryParser.class); - registerQueryParser(CommonTermsQueryParser.class); - registerQueryParser(SpanMultiTermQueryParser.class); - registerQueryParser(FunctionScoreQueryParser.class); - registerQueryParser(SimpleQueryStringParser.class); - registerQueryParser(TemplateQueryParser.class); - registerQueryParser(TypeQueryParser.class); - registerQueryParser(ScriptQueryParser.class); - registerQueryParser(GeoDistanceQueryParser.class); - registerQueryParser(GeoDistanceRangeQueryParser.class); - registerQueryParser(GeoBoundingBoxQueryParser.class); - registerQueryParser(GeohashCellQuery.Parser.class); - registerQueryParser(GeoPolygonQueryParser.class); - registerQueryParser(ExistsQueryParser.class); - registerQueryParser(MatchNoneQueryParser.class); - - if (ShapesAvailability.JTS_AVAILABLE && 
ShapesAvailability.SPATIAL4J_AVAILABLE) { - registerQueryParser(GeoShapeQueryParser.class); - } - } - private void registerBuiltInMappers() { registerMapper(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser()); registerMapper(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser()); @@ -237,10 +132,6 @@ public class IndicesModule extends AbstractModule { // last so that it can see all other mappers, including those coming from plugins } - public void registerQueryParser(Class queryParser) { - queryParsers.registerExtension(queryParser); - } - /** * Register a mapper for the given type. */ @@ -263,7 +154,6 @@ public class IndicesModule extends AbstractModule { @Override protected void configure() { - bindQueryParsersExtension(); bindMapperExtension(); bind(IndicesService.class).asEagerSingleton(); @@ -284,7 +174,6 @@ public class IndicesModule extends AbstractModule { bind(IndicesFieldDataCacheListener.class).asEagerSingleton(); bind(TermVectorsService.class).asEagerSingleton(); bind(NodeServicesProvider.class).asEagerSingleton(); - bind(ShapeBuilderRegistry.class).asEagerSingleton(); } // public for testing @@ -303,9 +192,4 @@ public class IndicesModule extends AbstractModule { protected void bindMapperExtension() { bind(MapperRegistry.class).toInstance(getMapperRegistry()); } - - protected void bindQueryParsersExtension() { - queryParsers.bind(binder()); - bind(IndicesQueriesRegistry.class).asEagerSingleton(); - } } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index c3c0ffe4732..fdc448989d5 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FileSystemUtils; import 
org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -70,7 +71,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.nio.file.Files; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -101,6 +101,7 @@ public class IndicesService extends AbstractLifecycleComponent i private final IndicesQueriesRegistry indicesQueriesRegistry; private final ClusterService clusterService; private final IndexNameExpressionResolver indexNameExpressionResolver; + private final IndexScopedSettings indexScopeSetting; private volatile Map indices = emptyMap(); private final Map> pendingDeletes = new HashMap<>(); private final OldShardsStats oldShardsStats = new OldShardsStats(); @@ -116,7 +117,7 @@ public class IndicesService extends AbstractLifecycleComponent i public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry, IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, - ClusterService clusterService, MapperRegistry mapperRegistry, ThreadPool threadPool) { + ClusterService clusterService, MapperRegistry mapperRegistry, ThreadPool threadPool, IndexScopedSettings indexScopedSettings) { super(settings); this.pluginsService = pluginsService; this.nodeEnv = nodeEnv; @@ -130,6 +131,7 @@ public class IndicesService extends AbstractLifecycleComponent i clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING, indexStoreConfig::setRateLimitingType); clusterSettings.addSettingsUpdateConsumer(IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING, 
indexStoreConfig::setRateLimitingThrottle); indexingMemoryController = new IndexingMemoryController(settings, threadPool, this); + this.indexScopeSetting = indexScopedSettings; } @Override @@ -280,7 +282,7 @@ public class IndicesService extends AbstractLifecycleComponent i } final String indexName = indexMetaData.getIndex(); final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(indexName, indexExpression, clusterService.state()); - final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, Collections.emptyList(), indexNameMatcher); + final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); Index index = new Index(indexMetaData.getIndex()); if (indices.containsKey(index.name())) { throw new IndexAlreadyExistsException(index); @@ -570,7 +572,7 @@ public class IndicesService extends AbstractLifecycleComponent i // play safe here and make sure that we take node level settings into account. // we might run on nodes where we use shard FS and then in the future don't delete // actual content. 
- return new IndexSettings(metaData, settings, Collections.emptyList()); + return new IndexSettings(metaData, settings); } /** diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java index 8c3ba6dc47d..c3364de9d5b 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesWarmer.java @@ -39,8 +39,6 @@ import java.util.concurrent.TimeUnit; */ public final class IndicesWarmer extends AbstractComponent { - public static final String INDEX_WARMER_ENABLED = "index.warmer.enabled"; - private final ThreadPool threadPool; private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); @@ -62,8 +60,7 @@ public final class IndicesWarmer extends AbstractComponent { if (shard.state() == IndexShardState.CLOSED) { return; } - final Settings indexSettings = settings.getSettings(); - if (!indexSettings.getAsBoolean(INDEX_WARMER_ENABLED, settings.getNodeSettings().getAsBoolean(INDEX_WARMER_ENABLED, true))) { + if (settings.isWarmerEnabled() == false) { return; } if (logger.isTraceEnabled()) { diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 7b7fca4b37b..e73396fcd7f 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -78,7 +78,7 @@ public final class AnalysisModule extends AbstractModule { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("_na_").settings(build).build(); - NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY); } private static final IndexSettings NA_INDEX_SETTINGS; private final Environment environment; 
diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index 3e63b6fba65..f99b39ef620 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -70,9 +71,9 @@ import java.util.function.Function; */ public class HunspellService extends AbstractComponent { - public final static String HUNSPELL_LAZY_LOAD = "indices.analysis.hunspell.dictionary.lazy"; - public final static String HUNSPELL_IGNORE_CASE = "indices.analysis.hunspell.dictionary.ignore_case"; - private final static String OLD_HUNSPELL_LOCATION = "indices.analysis.hunspell.dictionary.location"; + public final static Setting HUNSPELL_LAZY_LOAD = Setting.boolSetting("indices.analysis.hunspell.dictionary.lazy", Boolean.FALSE, false, Setting.Scope.CLUSTER); + public final static Setting HUNSPELL_IGNORE_CASE = Setting.boolSetting("indices.analysis.hunspell.dictionary.ignore_case", Boolean.FALSE, false, Setting.Scope.CLUSTER); + public final static Setting HUNSPELL_DICTIONARY_OPTIONS = Setting.groupSetting("indices.analysis.hunspell.dictionary.", false, Setting.Scope.CLUSTER); private final ConcurrentHashMap dictionaries = new ConcurrentHashMap<>(); private final Map knownDictionaries; private final boolean defaultIgnoreCase; @@ -82,8 +83,8 @@ public class HunspellService extends AbstractComponent { public HunspellService(final Settings settings, final Environment env, final Map knownDictionaries) throws IOException { super(settings); 
this.knownDictionaries = Collections.unmodifiableMap(knownDictionaries); - this.hunspellDir = resolveHunspellDirectory(settings, env); - this.defaultIgnoreCase = settings.getAsBoolean(HUNSPELL_IGNORE_CASE, false); + this.hunspellDir = resolveHunspellDirectory(env); + this.defaultIgnoreCase = HUNSPELL_IGNORE_CASE.get(settings); this.loadingFunction = (locale) -> { try { return loadDictionary(locale, settings, env); @@ -91,7 +92,7 @@ public class HunspellService extends AbstractComponent { throw new IllegalStateException("failed to load hunspell dictionary for locale: " + locale, e); } }; - if (!settings.getAsBoolean(HUNSPELL_LAZY_LOAD, false)) { + if (!HUNSPELL_LAZY_LOAD.get(settings)) { scanAndLoadDictionaries(); } @@ -110,11 +111,7 @@ public class HunspellService extends AbstractComponent { return dictionary; } - private Path resolveHunspellDirectory(Settings settings, Environment env) { - String location = settings.get(OLD_HUNSPELL_LOCATION, null); - if (location != null) { - throw new IllegalArgumentException("please, put your hunspell dictionaries under config/hunspell !"); - } + private Path resolveHunspellDirectory(Environment env) { return env.configFile().resolve("hunspell"); } @@ -162,7 +159,8 @@ public class HunspellService extends AbstractComponent { } // merging node settings with hunspell dictionary specific settings - nodeSettings = loadDictionarySettings(dicDir, nodeSettings.getByPrefix("indices.analysis.hunspell.dictionary." 
+ locale + ".")); + Settings dictSettings = HUNSPELL_DICTIONARY_OPTIONS.get(nodeSettings); + nodeSettings = loadDictionarySettings(dicDir, dictSettings.getByPrefix(locale)); boolean ignoreCase = nodeSettings.getAsBoolean("ignore_case", defaultIgnoreCase); diff --git a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java index a00cc7e787c..3cdb637d370 100644 --- a/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/cache/request/IndicesRequestCache.java @@ -38,7 +38,9 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -78,11 +80,11 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis * A setting to enable or disable request caching on an index level. Its dynamic by default * since we are checking on the cluster state IndexMetaData always. 
*/ - public static final String INDEX_CACHE_REQUEST_ENABLED = "index.requests.cache.enable"; - public static final String INDICES_CACHE_REQUEST_CLEAN_INTERVAL = "indices.requests.cache.clean_interval"; + public static final Setting INDEX_CACHE_REQUEST_ENABLED_SETTING = Setting.boolSetting("index.requests.cache.enable", true, true, Setting.Scope.INDEX); + public static final Setting INDICES_CACHE_REQUEST_CLEAN_INTERVAL = Setting.positiveTimeSetting("indices.requests.cache.clean_interval", TimeValue.timeValueSeconds(60), false, Setting.Scope.CLUSTER); - public static final String INDICES_CACHE_QUERY_SIZE = "indices.requests.cache.size"; - public static final String INDICES_CACHE_QUERY_EXPIRE = "indices.requests.cache.expire"; + public static final Setting INDICES_CACHE_QUERY_SIZE = Setting.byteSizeSetting("indices.requests.cache.size", "1%", false, Setting.Scope.CLUSTER); + public static final Setting INDICES_CACHE_QUERY_EXPIRE = Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), false, Setting.Scope.CLUSTER); private static final Set CACHEABLE_SEARCH_TYPES = EnumSet.of(SearchType.QUERY_THEN_FETCH, SearchType.QUERY_AND_FETCH); @@ -97,7 +99,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis //TODO make these changes configurable on the cluster level - private final String size; + private final ByteSizeValue size; private final TimeValue expire; private volatile Cache cache; @@ -107,23 +109,20 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis super(settings); this.clusterService = clusterService; this.threadPool = threadPool; - this.cleanInterval = settings.getAsTime(INDICES_CACHE_REQUEST_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60)); + this.cleanInterval = INDICES_CACHE_REQUEST_CLEAN_INTERVAL.get(settings); - this.size = settings.get(INDICES_CACHE_QUERY_SIZE, "1%"); + this.size = INDICES_CACHE_QUERY_SIZE.get(settings); - this.expire = 
settings.getAsTime(INDICES_CACHE_QUERY_EXPIRE, null); + this.expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; buildCache(); this.reaper = new Reaper(); threadPool.schedule(cleanInterval, ThreadPool.Names.SAME, reaper); } - private boolean isCacheEnabled(Settings settings, boolean defaultEnable) { - return settings.getAsBoolean(INDEX_CACHE_REQUEST_ENABLED, defaultEnable); - } private void buildCache() { - long sizeInBytes = MemorySizeValue.parseBytesSizeValueOrHeapRatio(size, INDICES_CACHE_QUERY_SIZE).bytes(); + long sizeInBytes = size.bytes(); CacheBuilder cacheBuilder = CacheBuilder.builder() .setMaximumWeight(sizeInBytes).weigher((k, v) -> k.ramBytesUsed() + v.ramBytesUsed()).removalListener(this); @@ -182,7 +181,7 @@ public class IndicesRequestCache extends AbstractComponent implements RemovalLis } // if not explicitly set in the request, use the index setting, if not, use the request if (request.requestCache() == null) { - if (!isCacheEnabled(index.getSettings(), Boolean.FALSE)) { + if (INDEX_CACHE_REQUEST_ENABLED_SETTING.get(index.getSettings()) == false) { return false; } } else if (!request.requestCache()) { diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 9357de7b1eb..6052d109565 100644 --- a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -73,7 +73,6 @@ import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -257,7 +256,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { try { if (indexShard.recoverFromStore(nodes.localNode())) { - 
shardStateAction.shardStarted(state, shardRouting, indexMetaData.getIndexUUID(), "after recovery from store"); + shardStateAction.shardStarted(shardRouting, indexMetaData.getIndexUUID(), "after recovery from store", SHARD_STATE_ACTION_LISTENER); } } catch (Throwable t) { handleRecoveryFailure(indexService, shardRouting, true, t); @@ -664,7 +663,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent { - public static final String FIELDDATA_CLEAN_INTERVAL_SETTING = "indices.fielddata.cache.cleanup_interval"; - public static final String INDICES_FIELDDATA_CACHE_SIZE_KEY = "indices.fielddata.cache.size"; + public static final Setting INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.fielddata.cache.cleanup_interval", TimeValue.timeValueMinutes(1), false, Setting.Scope.CLUSTER); + public static final Setting INDICES_FIELDDATA_CACHE_SIZE_KEY = Setting.byteSizeSetting("indices.fielddata.cache.size", new ByteSizeValue(-1), false, Setting.Scope.CLUSTER); private final IndicesFieldDataCacheListener indicesFieldDataCacheListener; @@ -68,18 +69,16 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL super(settings); this.threadPool = threadPool; this.indicesFieldDataCacheListener = indicesFieldDataCacheListener; - final String size = settings.get(INDICES_FIELDDATA_CACHE_SIZE_KEY, "-1"); - final long sizeInBytes = settings.getAsMemory(INDICES_FIELDDATA_CACHE_SIZE_KEY, "-1").bytes(); + final long sizeInBytes = INDICES_FIELDDATA_CACHE_SIZE_KEY.get(settings).bytes(); CacheBuilder cacheBuilder = CacheBuilder.builder() .removalListener(this); if (sizeInBytes > 0) { cacheBuilder.setMaximumWeight(sizeInBytes).weigher(new FieldDataWeigher()); } - logger.debug("using size [{}] [{}]", size, new ByteSizeValue(sizeInBytes)); cache = cacheBuilder.build(); - this.cleanInterval = settings.getAsTime(FIELDDATA_CLEAN_INTERVAL_SETTING, TimeValue.timeValueMinutes(1)); + this.cleanInterval = 
INDICES_FIELDDATA_CLEAN_INTERVAL_SETTING.get(settings); // Start thread that will manage cleaning the field data cache periodically threadPool.schedule(this.cleanInterval, ThreadPool.Names.SAME, new FieldDataCacheCleaner(this.cache, this.logger, this.threadPool, this.cleanInterval)); diff --git a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java index 08b7b34e91a..a9e90884a68 100644 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java @@ -19,39 +19,18 @@ package org.elasticsearch.indices.query; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.EmptyQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParser; - -import java.util.HashMap; import java.util.Map; -import java.util.Set; -import static java.util.Collections.unmodifiableMap; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryParser; public class IndicesQueriesRegistry extends AbstractComponent { private Map> queryParsers; - @Inject - public IndicesQueriesRegistry(Settings settings, Set injectedQueryParsers, NamedWriteableRegistry namedWriteableRegistry) { + public IndicesQueriesRegistry(Settings settings, Map> queryParsers) { super(settings); - Map> queryParsers = new HashMap<>(); - for (@SuppressWarnings("unchecked") QueryParser queryParser : injectedQueryParsers) { - for (String name : queryParser.names()) { - queryParsers.put(name, queryParser); - } - 
@SuppressWarnings("unchecked") NamedWriteable qb = queryParser.getBuilderPrototype(); - namedWriteableRegistry.registerPrototype(QueryBuilder.class, qb); - } - // EmptyQueryBuilder is not registered as query parser but used internally. - // We need to register it with the NamedWriteableRegistry in order to serialize it - namedWriteableRegistry.registerPrototype(QueryBuilder.class, EmptyQueryBuilder.PROTOTYPE); - this.queryParsers = unmodifiableMap(queryParsers); + this.queryParsers = queryParsers; } /** diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java index c0270e71721..1ef9215b7b4 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySource.java @@ -37,6 +37,7 @@ import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; import org.elasticsearch.transport.TransportService; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -82,7 +83,7 @@ public class RecoverySource extends AbstractComponent implements IndexEventListe } } - private RecoveryResponse recover(final StartRecoveryRequest request) { + private RecoveryResponse recover(final StartRecoveryRequest request) throws IOException { final IndexService indexService = indicesService.indexServiceSafe(request.shardId().index().name()); final IndexShard shard = indexService.getShard(request.shardId().id()); diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 94c78efccd8..4699e8d5ace 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ 
-120,7 +120,7 @@ public class RecoverySourceHandler { /** * performs the recovery from the local engine to the target */ - public RecoveryResponse recoverToTarget() { + public RecoveryResponse recoverToTarget() throws IOException { try (Translog.View translogView = shard.acquireTranslogView()) { logger.trace("captured translog id [{}] for recovery", translogView.minTranslogGeneration()); final IndexCommit phase1Snapshot; @@ -144,8 +144,8 @@ public class RecoverySourceHandler { } logger.trace("snapshot translog for recovery. current size is [{}]", translogView.totalOperations()); - try (Translog.Snapshot phase2Snapshot = translogView.snapshot()) { - phase2(phase2Snapshot); + try { + phase2(translogView.snapshot()); } catch (Throwable e) { throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e); } @@ -308,7 +308,7 @@ public class RecoverySourceHandler { }); } - prepareTargetForTranslog(translogView); + prepareTargetForTranslog(translogView.totalOperations()); logger.trace("[{}][{}] recovery [phase1] to {}: took [{}]", indexName, shardId, request.targetNode(), stopWatch.totalTime()); response.phase1Time = stopWatch.totalTime().millis(); @@ -320,8 +320,7 @@ public class RecoverySourceHandler { } - - protected void prepareTargetForTranslog(final Translog.View translogView) { + protected void prepareTargetForTranslog(final int totalTranslogOps) { StopWatch stopWatch = new StopWatch().start(); logger.trace("{} recovery [phase1] to {}: prepare remote engine for translog", request.shardId(), request.targetNode()); final long startEngineStart = stopWatch.totalTime().millis(); @@ -332,7 +331,7 @@ public class RecoverySourceHandler { // operations. This ensures the shard engine is started and disables // garbage collection (not the JVM's GC!) 
of tombstone deletes transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.PREPARE_TRANSLOG, - new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId(), translogView.totalOperations()), + new RecoveryPrepareForTranslogOperationsRequest(request.recoveryId(), request.shardId(), totalTranslogOps), TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionTimeout()).build(), EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); } }); @@ -463,14 +462,14 @@ public class RecoverySourceHandler { // make sense to re-enable throttling in this phase cancellableThreads.execute(() -> { final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( - request.recoveryId(), request.shardId(), operations, snapshot.estimatedTotalOperations()); + request.recoveryId(), request.shardId(), operations, snapshot.totalOperations()); transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, recoveryOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); }); if (logger.isTraceEnabled()) { logger.trace("[{}][{}] sent batch of [{}][{}] (total: [{}]) translog operations to {}", indexName, shardId, ops, new ByteSizeValue(size), - snapshot.estimatedTotalOperations(), + snapshot.totalOperations(), request.targetNode()); } @@ -488,7 +487,7 @@ public class RecoverySourceHandler { if (!operations.isEmpty()) { cancellableThreads.execute(() -> { RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( - request.recoveryId(), request.shardId(), operations, snapshot.estimatedTotalOperations()); + request.recoveryId(), request.shardId(), operations, snapshot.totalOperations()); transportService.submitRequest(request.targetNode(), RecoveryTarget.Actions.TRANSLOG_OPS, translogOperationsRequest, recoveryOptions, EmptyTransportResponseHandler.INSTANCE_SAME).txGet(); }); @@ -497,7 +496,7 
@@ public class RecoverySourceHandler { if (logger.isTraceEnabled()) { logger.trace("[{}][{}] sent final batch of [{}][{}] (total: [{}]) translog operations to {}", indexName, shardId, ops, new ByteSizeValue(size), - snapshot.estimatedTotalOperations(), + snapshot.totalOperations(), request.targetNode()); } return totalOperations; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java index e849580b2c4..16bd1d46553 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/SharedFSRecoverySourceHandler.java @@ -58,7 +58,7 @@ public class SharedFSRecoverySourceHandler extends RecoverySourceHandler { shard.failShard("failed to close engine (phase1)", e); } } - prepareTargetForTranslog(Translog.View.EMPTY_VIEW); + prepareTargetForTranslog(0); finalizeRecovery(); return response; } catch (Throwable t) { diff --git a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java index 4a76d262130..49851180ec7 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java +++ b/core/src/main/java/org/elasticsearch/indices/store/IndicesStore.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -68,7 +69,7 @@ import java.util.concurrent.atomic.AtomicInteger; public class IndicesStore extends AbstractComponent implements 
ClusterStateListener, Closeable { // TODO this class can be foled into either IndicesService and partially into IndicesClusterStateService there is no need for a seperate public service - public static final String INDICES_STORE_DELETE_SHARD_TIMEOUT = "indices.store.delete.shard.timeout"; + public static final Setting INDICES_STORE_DELETE_SHARD_TIMEOUT = Setting.positiveTimeSetting("indices.store.delete.shard.timeout", new TimeValue(30, TimeUnit.SECONDS), false, Setting.Scope.CLUSTER); public static final String ACTION_SHARD_EXISTS = "internal:index/shard/exists"; private static final EnumSet ACTIVE_STATES = EnumSet.of(IndexShardState.STARTED, IndexShardState.RELOCATED); private final IndicesService indicesService; @@ -85,7 +86,7 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe this.clusterService = clusterService; this.transportService = transportService; transportService.registerRequestHandler(ACTION_SHARD_EXISTS, ShardActiveRequest::new, ThreadPool.Names.SAME, new ShardActiveRequestHandler()); - this.deleteShardTimeout = settings.getAsTime(INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS)); + this.deleteShardTimeout = INDICES_STORE_DELETE_SHARD_TIMEOUT.get(settings); clusterService.addLast(this); } @@ -111,11 +112,12 @@ public class IndicesStore extends AbstractComponent implements ClusterStateListe } for (IndexRoutingTable indexRoutingTable : event.state().routingTable()) { - IndexSettings indexSettings = new IndexSettings(event.state().getMetaData().index(indexRoutingTable.index()), settings, Collections.emptyList()); // Note, closed indices will not have any routing information, so won't be deleted for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { if (shardCanBeDeleted(event.state(), indexShardRoutingTable)) { ShardId shardId = indexShardRoutingTable.shardId(); + IndexService indexService = indicesService.indexService(indexRoutingTable.getIndex()); + IndexSettings indexSettings = 
indexService != null ? indexService.getIndexSettings() : new IndexSettings(event.state().getMetaData().index(indexRoutingTable.index()), settings); if (indicesService.canDeleteShardContent(shardId, indexSettings)) { deleteShardIfExistElseWhere(event.state(), indexShardRoutingTable); } diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 65902b443e9..6a6b05c4ad1 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -171,11 +172,7 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction { public static final Setting INDICES_TTL_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), true, Setting.Scope.CLUSTER); - public static final String INDEX_TTL_DISABLE_PURGE = "index.ttl.disable_purge"; private final ClusterService clusterService; private final IndicesService indicesService; @@ -164,8 +163,7 @@ public class IndicesTTLService extends AbstractLifecycleComponent WRITE_PORTS_FIELD_SETTING = Setting.boolSetting("node.portsfile", false, false, Setting.Scope.CLUSTER); private final Lifecycle lifecycle = new Lifecycle(); private final Injector injector; private final Settings settings; @@ -164,6 +166,7 @@ public class Node implements Releasable { final 
NetworkService networkService = new NetworkService(settings); final SettingsFilter settingsFilter = new SettingsFilter(settings); final ThreadPool threadPool = new ThreadPool(settings); + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); boolean success = false; try { final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool); @@ -178,7 +181,7 @@ public class Node implements Releasable { modules.add(new SettingsModule(this.settings, settingsFilter)); modules.add(new EnvironmentModule(environment)); modules.add(new NodeModule(this, monitorService)); - modules.add(new NetworkModule(networkService, settings, false)); + modules.add(new NetworkModule(networkService, settings, false, namedWriteableRegistry)); modules.add(new ScriptModule(this.settings)); modules.add(new NodeEnvironmentModule(nodeEnvironment)); modules.add(new ClusterNameModule(this.settings)); @@ -186,7 +189,7 @@ public class Node implements Releasable { modules.add(new DiscoveryModule(this.settings)); modules.add(new ClusterModule(this.settings)); modules.add(new IndicesModule()); - modules.add(new SearchModule()); + modules.add(new SearchModule(settings, namedWriteableRegistry)); modules.add(new ActionModule(false)); modules.add(new GatewayModule(settings)); modules.add(new NodeClientModule()); @@ -273,7 +276,7 @@ public class Node implements Releasable { injector.getInstance(ResourceWatcherService.class).start(); injector.getInstance(TribeService.class).start(); - if (System.getProperty("es.tests.portsfile", "false").equals("true")) { + if (WRITE_PORTS_FIELD_SETTING.get(settings)) { if (settings.getAsBoolean("http.enabled", true)) { HttpServerTransport http = injector.getInstance(HttpServerTransport.class); writePortsFile("http", http.boundAddress()); diff --git a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java index 
1c2ab33e9be..df4e09d28e8 100644 --- a/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/internal/InternalSettingsPreparer.java @@ -21,7 +21,6 @@ package org.elasticsearch.node.internal; import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.cli.Terminal; @@ -108,8 +107,7 @@ public class InternalSettingsPreparer { environment = new Environment(output.build()); // we put back the path.logs so we can use it in the logging configuration file - output.put("path.logs", cleanPath(environment.logsFile().toAbsolutePath().toString())); - + output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString())); return new Environment(output.build()); } diff --git a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java index a57a96c631d..a7d088ce214 100644 --- a/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/DummyPluginInfo.java @@ -20,9 +20,9 @@ package org.elasticsearch.plugins; public class DummyPluginInfo extends PluginInfo { - private DummyPluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) { - super(name, description, site, version, jvm, classname, isolated); + private DummyPluginInfo(String name, String description, String version, String classname, boolean isolated) { + super(name, description, version, classname, isolated); } - public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", true, "dummy_plugin_version", true, "DummyPluginName", true); + public static final 
DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName", true); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java index 3062f01697d..76af7833f06 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -42,19 +42,14 @@ public class PluginInfo implements Streamable, ToXContent { static final XContentBuilderString NAME = new XContentBuilderString("name"); static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description"); static final XContentBuilderString URL = new XContentBuilderString("url"); - static final XContentBuilderString SITE = new XContentBuilderString("site"); static final XContentBuilderString VERSION = new XContentBuilderString("version"); - static final XContentBuilderString JVM = new XContentBuilderString("jvm"); static final XContentBuilderString CLASSNAME = new XContentBuilderString("classname"); static final XContentBuilderString ISOLATED = new XContentBuilderString("isolated"); } private String name; private String description; - private boolean site; private String version; - - private boolean jvm; private String classname; private boolean isolated; @@ -66,15 +61,11 @@ public class PluginInfo implements Streamable, ToXContent { * * @param name Its name * @param description Its description - * @param site true if it's a site plugin - * @param jvm true if it's a jvm plugin * @param version Version number */ - PluginInfo(String name, String description, boolean site, String version, boolean jvm, String classname, boolean isolated) { + PluginInfo(String name, String description, String version, String classname, boolean isolated) { this.name = name; this.description = description; - this.site = site; - this.jvm = jvm; this.version = version; this.classname = classname; 
this.isolated = isolated; @@ -101,43 +92,28 @@ public class PluginInfo implements Streamable, ToXContent { throw new IllegalArgumentException("Property [version] is missing for plugin [" + name + "]"); } - boolean jvm = Boolean.parseBoolean(props.getProperty("jvm")); - boolean site = Boolean.parseBoolean(props.getProperty("site")); - if (jvm == false && site == false) { - throw new IllegalArgumentException("Plugin [" + name + "] must be at least a jvm or site plugin"); + String esVersionString = props.getProperty("elasticsearch.version"); + if (esVersionString == null) { + throw new IllegalArgumentException("Property [elasticsearch.version] is missing for plugin [" + name + "]"); } - boolean isolated = true; - String classname = "NA"; - if (jvm) { - String esVersionString = props.getProperty("elasticsearch.version"); - if (esVersionString == null) { - throw new IllegalArgumentException("Property [elasticsearch.version] is missing for jvm plugin [" + name + "]"); - } - Version esVersion = Version.fromString(esVersionString); - if (esVersion.equals(Version.CURRENT) == false) { - throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() + - "]. 
Was designed for version [" + esVersionString + "]"); - } - String javaVersionString = props.getProperty("java.version"); - if (javaVersionString == null) { - throw new IllegalArgumentException("Property [java.version] is missing for jvm plugin [" + name + "]"); - } - JarHell.checkVersionFormat(javaVersionString); - JarHell.checkJavaVersion(name, javaVersionString); - isolated = Boolean.parseBoolean(props.getProperty("isolated", "true")); - classname = props.getProperty("classname"); - if (classname == null) { - throw new IllegalArgumentException("Property [classname] is missing for jvm plugin [" + name + "]"); - } + Version esVersion = Version.fromString(esVersionString); + if (esVersion.equals(Version.CURRENT) == false) { + throw new IllegalArgumentException("Plugin [" + name + "] is incompatible with Elasticsearch [" + Version.CURRENT.toString() + + "]. Was designed for version [" + esVersionString + "]"); + } + String javaVersionString = props.getProperty("java.version"); + if (javaVersionString == null) { + throw new IllegalArgumentException("Property [java.version] is missing for plugin [" + name + "]"); + } + JarHell.checkVersionFormat(javaVersionString); + JarHell.checkJavaVersion(name, javaVersionString); + boolean isolated = Boolean.parseBoolean(props.getProperty("isolated", "true")); + String classname = props.getProperty("classname"); + if (classname == null) { + throw new IllegalArgumentException("Property [classname] is missing for plugin [" + name + "]"); } - if (site) { - if (!Files.exists(dir.resolve("_site"))) { - throw new IllegalArgumentException("Plugin [" + name + "] is a site plugin but has no '_site/' directory"); - } - } - - return new PluginInfo(name, description, site, version, jvm, classname, isolated); + return new PluginInfo(name, description, version, classname, isolated); } /** @@ -155,46 +131,19 @@ public class PluginInfo implements Streamable, ToXContent { } /** - * @return true if it's a site plugin - */ - public boolean isSite() 
{ - return site; - } - - /** - * @return true if it's a plugin running in the jvm - */ - public boolean isJvm() { - return jvm; - } - - /** - * @return true if jvm plugin has isolated classloader + * @return true if plugin has isolated classloader */ public boolean isIsolated() { return isolated; } /** - * @return jvm plugin's classname + * @return plugin's classname */ public String getClassname() { return classname; } - /** - * We compute the URL for sites: "/_plugin/" + name + "/" - * - * @return relative URL for site plugin - */ - public String getUrl() { - if (site) { - return ("/_plugin/" + name + "/"); - } else { - return null; - } - } - /** * @return Version number for the plugin */ @@ -212,8 +161,6 @@ public class PluginInfo implements Streamable, ToXContent { public void readFrom(StreamInput in) throws IOException { this.name = in.readString(); this.description = in.readString(); - this.site = in.readBoolean(); - this.jvm = in.readBoolean(); this.version = in.readString(); this.classname = in.readString(); this.isolated = in.readBoolean(); @@ -223,8 +170,6 @@ public class PluginInfo implements Streamable, ToXContent { public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(description); - out.writeBoolean(site); - out.writeBoolean(jvm); out.writeString(version); out.writeString(classname); out.writeBoolean(isolated); @@ -236,15 +181,8 @@ public class PluginInfo implements Streamable, ToXContent { builder.field(Fields.NAME, name); builder.field(Fields.VERSION, version); builder.field(Fields.DESCRIPTION, description); - if (site) { - builder.field(Fields.URL, getUrl()); - } - builder.field(Fields.JVM, jvm); - if (jvm) { - builder.field(Fields.CLASSNAME, classname); - builder.field(Fields.ISOLATED, isolated); - } - builder.field(Fields.SITE, site); + builder.field(Fields.CLASSNAME, classname); + builder.field(Fields.ISOLATED, isolated); builder.endObject(); return builder; @@ -274,14 +212,9 @@ public class PluginInfo 
implements Streamable, ToXContent { .append("- Plugin information:\n") .append("Name: ").append(name).append("\n") .append("Description: ").append(description).append("\n") - .append("Site: ").append(site).append("\n") .append("Version: ").append(version).append("\n") - .append("JVM: ").append(jvm).append("\n"); - - if (jvm) { - information.append(" * Classname: ").append(classname).append("\n"); - information.append(" * Isolated: ").append(isolated); - } + .append(" * Classname: ").append(classname).append("\n") + .append(" * Isolated: ").append(isolated); return information.toString(); } diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java index 7cd50409fb6..8e6391ee0c6 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginManager.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginManager.java @@ -258,9 +258,7 @@ public class PluginManager { } // check for jar hell before any copying - if (info.isJvm()) { - jarHellCheck(root, info.isIsolated()); - } + jarHellCheck(root, info.isIsolated()); // read optional security policy (extra permissions) // if it exists, confirm or warn the user diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index 50938a1916c..4e61185491d 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -98,7 +98,7 @@ public class PluginsService extends AbstractComponent { // first we load plugins that are on the classpath. 
this is for tests and transport clients for (Class pluginClass : classpathPlugins) { Plugin plugin = loadPlugin(pluginClass, settings); - PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), false, "NA", true, pluginClass.getName(), false); + PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), "NA", pluginClass.getName(), false); if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } @@ -136,18 +136,10 @@ public class PluginsService extends AbstractComponent { plugins = Collections.unmodifiableList(pluginsLoaded); - // We need to build a List of jvm and site plugins for checking mandatory plugins - Map jvmPlugins = new HashMap<>(); - List sitePlugins = new ArrayList<>(); - + // We need to build a List of plugins for checking mandatory plugins + Set pluginsNames = new HashSet<>(); for (Tuple tuple : plugins) { - PluginInfo info = tuple.v1(); - if (info.isJvm()) { - jvmPlugins.put(info.getName(), tuple.v2()); - } - if (info.isSite()) { - sitePlugins.add(info.getName()); - } + pluginsNames.add(tuple.v1().getName()); } // Checking expected plugins @@ -155,7 +147,7 @@ public class PluginsService extends AbstractComponent { if (mandatoryPlugins != null) { Set missingPlugins = new HashSet<>(); for (String mandatoryPlugin : mandatoryPlugins) { - if (!jvmPlugins.containsKey(mandatoryPlugin) && !sitePlugins.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) { + if (!pluginsNames.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) { missingPlugins.add(mandatoryPlugin); } } @@ -175,10 +167,11 @@ public class PluginsService extends AbstractComponent { jvmPluginNames.add(pluginInfo.getName()); } - logger.info("modules {}, plugins {}, sites {}", moduleNames, jvmPluginNames, sitePlugins); + logger.info("modules {}, plugins {}", moduleNames, jvmPluginNames); Map> onModuleReferences = new HashMap<>(); - for (Plugin plugin : jvmPlugins.values()) { + for 
(Tuple pluginEntry : plugins) { + Plugin plugin = pluginEntry.v2(); List list = new ArrayList<>(); for (Method method : plugin.getClass().getMethods()) { if (!method.getName().equals("onModule")) { @@ -304,9 +297,6 @@ public class PluginsService extends AbstractComponent { continue; // skip over .DS_Store etc } PluginInfo info = PluginInfo.readFromProperties(module); - if (!info.isJvm()) { - throw new IllegalStateException("modules must be jvm plugins: " + info); - } if (!info.isIsolated()) { throw new IllegalStateException("modules must be isolated: " + info); } @@ -353,17 +343,14 @@ public class PluginsService extends AbstractComponent { } List urls = new ArrayList<>(); - if (info.isJvm()) { - // a jvm plugin: gather urls for jar files - try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { - for (Path jar : jarStream) { - // normalize with toRealPath to get symlinks out of our hair - urls.add(jar.toRealPath().toUri().toURL()); - } + try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { + for (Path jar : jarStream) { + // normalize with toRealPath to get symlinks out of our hair + urls.add(jar.toRealPath().toUri().toURL()); } } final Bundle bundle; - if (info.isJvm() && info.isIsolated() == false) { + if (info.isIsolated() == false) { bundle = bundles.get(0); // purgatory } else { bundle = new Bundle(); @@ -395,15 +382,10 @@ public class PluginsService extends AbstractComponent { // create a child to load the plugins in this bundle ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader()); for (PluginInfo pluginInfo : bundle.plugins) { - final Plugin plugin; - if (pluginInfo.isJvm()) { - // reload lucene SPI with any new services from the plugin - reloadLuceneSPI(loader); - Class pluginClass = loadPluginClass(pluginInfo.getClassname(), loader); - plugin = loadPlugin(pluginClass, settings); - } else { - plugin = new SitePlugin(pluginInfo.getName(), 
pluginInfo.getDescription()); - } + // reload lucene SPI with any new services from the plugin + reloadLuceneSPI(loader); + final Class pluginClass = loadPluginClass(pluginInfo.getClassname(), loader); + final Plugin plugin = loadPlugin(pluginClass, settings); plugins.add(new Tuple<>(pluginInfo, plugin)); } } diff --git a/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java b/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java deleted file mode 100644 index 4c12f2095bb..00000000000 --- a/core/src/main/java/org/elasticsearch/plugins/SitePlugin.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plugins; - -/** A site-only plugin, just serves resources */ -final class SitePlugin extends Plugin { - final String name; - final String description; - - SitePlugin(String name, String description) { - this.name = name; - this.description = description; - } - - @Override - public String name() { - return name; - } - - @Override - public String description() { - return description; - } -} diff --git a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java index 33f9d4e7c30..d591f396400 100644 --- a/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.fs.FsBlobStore; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -33,6 +34,7 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; import java.nio.file.Path; +import java.util.function.Function; /** * Shared file system implementation of the BlobStoreRepository @@ -49,6 +51,13 @@ public class FsRepository extends BlobStoreRepository { public final static String TYPE = "fs"; + public static final Setting LOCATION_SETTING = new Setting<>("location", "", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_LOCATION_SETTING = new Setting<>("repositories.fs.location", "", Function.identity(), false, Setting.Scope.CLUSTER); + public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", "-1", false, Setting.Scope.CLUSTER); + public 
static final Setting REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size", "-1", false, Setting.Scope.CLUSTER); + public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_COMPRESS_SETTING = Setting.boolSetting("repositories.fs.compress", false, false, Setting.Scope.CLUSTER); + private final FsBlobStore blobStore; private ByteSizeValue chunkSize; @@ -68,7 +77,7 @@ public class FsRepository extends BlobStoreRepository { public FsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); Path locationFile; - String location = repositorySettings.settings().get("location", settings.get("repositories.fs.location")); + String location = LOCATION_SETTING.exists(repositorySettings.settings()) ? LOCATION_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LOCATION_SETTING.get(settings); if (location == null) { logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes"); throw new RepositoryException(name.name(), "missing location"); @@ -85,8 +94,14 @@ public class FsRepository extends BlobStoreRepository { } blobStore = new FsBlobStore(settings, locationFile); - this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("repositories.fs.chunk_size", null)); - this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("repositories.fs.compress", false)); + if (CHUNK_SIZE_SETTING.exists(repositorySettings.settings())) { + this.chunkSize = CHUNK_SIZE_SETTING.get(repositorySettings.settings()); + } else if (REPOSITORIES_CHUNK_SIZE_SETTING.exists(settings)) { + this.chunkSize = 
REPOSITORIES_CHUNK_SIZE_SETTING.get(settings); + } else { + this.chunkSize = null; + } + this.compress = COMPRESS_SETTING.exists(repositorySettings.settings()) ? COMPRESS_SETTING.get(repositorySettings.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings); this.basePath = BlobPath.cleanPath(); } diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java index 4d361683e5c..2d15db245aa 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/uri/URLRepository.java @@ -20,11 +20,11 @@ package org.elasticsearch.repositories.uri; import org.elasticsearch.cluster.metadata.SnapshotId; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.url.URLBlobStore; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.URIPattern; import org.elasticsearch.env.Environment; import org.elasticsearch.index.snapshots.IndexShardRepository; @@ -34,9 +34,13 @@ import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; +import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; +import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.function.Function; /** * Read-only URL-based implementation of the BlobStoreRepository @@ -51,13 +55,21 @@ public class URLRepository extends BlobStoreRepository { public final static String TYPE = "url"; - public final static String[] DEFAULT_SUPPORTED_PROTOCOLS = {"http", "https", "ftp", "file", "jar"}; + public static final Setting> SUPPORTED_PROTOCOLS_SETTING = 
Setting.listSetting("repositories.url.supported_protocols", + Arrays.asList("http", "https", "ftp", "file", "jar"), Function.identity(), false, Setting.Scope.CLUSTER); - public final static String SUPPORTED_PROTOCOLS_SETTING = "repositories.url.supported_protocols"; + public static final Setting> ALLOWED_URLS_SETTING = Setting.listSetting("repositories.url.allowed_urls", + Collections.emptyList(), URIPattern::new, false, Setting.Scope.CLUSTER); - public final static String ALLOWED_URLS_SETTING = "repositories.url.allowed_urls"; + public static final Setting URL_SETTING = new Setting<>("url", "http:", URLRepository::parseURL, false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_URL_SETTING = new Setting<>("repositories.url.url", (s) -> s.get("repositories.uri.url", "http:"), + URLRepository::parseURL, false, Setting.Scope.CLUSTER); - private final String[] supportedProtocols; + public static final Setting LIST_DIRECTORIES_SETTING = Setting.boolSetting("list_directories", true, false, Setting.Scope.CLUSTER); + public static final Setting REPOSITORIES_LIST_DIRECTORIES_SETTING = Setting.boolSetting("repositories.uri.list_directories", true, + false, Setting.Scope.CLUSTER); + + private final List supportedProtocols; private final URIPattern[] urlWhiteList; @@ -79,21 +91,16 @@ public class URLRepository extends BlobStoreRepository { @Inject public URLRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, Environment environment) throws IOException { super(name.getName(), repositorySettings, indexShardRepository); - URL url; - String path = repositorySettings.settings().get("url", settings.get("repositories.url.url", settings.get("repositories.uri.url"))); - if (path == null) { + + if (URL_SETTING.exists(repositorySettings.settings()) == false && REPOSITORIES_URL_SETTING.exists(settings) == false) { throw new RepositoryException(name.name(), "missing url"); - } else { - url = new URL(path); - 
} - supportedProtocols = settings.getAsArray(SUPPORTED_PROTOCOLS_SETTING, DEFAULT_SUPPORTED_PROTOCOLS); - String[] urlWhiteList = settings.getAsArray(ALLOWED_URLS_SETTING, Strings.EMPTY_ARRAY); - this.urlWhiteList = new URIPattern[urlWhiteList.length]; - for (int i = 0; i < urlWhiteList.length; i++) { - this.urlWhiteList[i] = new URIPattern(urlWhiteList[i]); } + supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(settings); + urlWhiteList = ALLOWED_URLS_SETTING.get(settings).toArray(new URIPattern[]{}); this.environment = environment; - listDirectories = repositorySettings.settings().getAsBoolean("list_directories", settings.getAsBoolean("repositories.uri.list_directories", true)); + listDirectories = LIST_DIRECTORIES_SETTING.exists(repositorySettings.settings()) ? LIST_DIRECTORIES_SETTING.get(repositorySettings.settings()) : REPOSITORIES_LIST_DIRECTORIES_SETTING.get(settings); + + URL url = URL_SETTING.exists(repositorySettings.settings()) ? URL_SETTING.get(repositorySettings.settings()) : REPOSITORIES_URL_SETTING.get(settings); URL normalizedURL = checkURL(url); blobStore = new URLBlobStore(settings, normalizedURL); basePath = BlobPath.cleanPath(); @@ -147,8 +154,8 @@ public class URLRepository extends BlobStoreRepository { // We didn't match white list - try to resolve against path.repo URL normalizedUrl = environment.resolveRepoURL(url); if (normalizedUrl == null) { - logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] or by repositories.url.allowed_urls setting: [{}] ", url, environment.repoFiles()); - throw new RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or repositories.url.allowed_urls"); + logger.warn("The specified url [{}] doesn't start with any repository paths specified by the path.repo setting or by {} setting: [{}] ", url, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles()); + throw new 
RepositoryException(repositoryName, "file url [" + url + "] doesn't match any of the locations specified by path.repo or " + ALLOWED_URLS_SETTING.getKey()); } return normalizedUrl; } @@ -161,4 +168,11 @@ public class URLRepository extends BlobStoreRepository { return true; } + private static URL parseURL(String s) { + try { + return new URL(s); + } catch (MalformedURLException e) { + throw new IllegalArgumentException("Unable to parse URL repository setting", e); + } + } } diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 294338c0501..bb99218855f 100644 --- a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -71,7 +71,7 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH this.headers = headers; } - private static void copyHeadersAndContext(ActionRequest actionRequest, RestRequest restRequest, Set headers) { + private static void copyHeadersAndContext(ActionRequest actionRequest, RestRequest restRequest, Set headers) { for (String usefulHeader : headers) { String headerValue = restRequest.header(usefulHeader); if (headerValue != null) { @@ -82,7 +82,8 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, ActionListener listener) { copyHeadersAndContext(request, restRequest, headers); super.doExecute(action, request, listener); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java index fc4432a658f..5acbfc48d24 100644 
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/settings/RestClusterGetSettingsAction.java @@ -58,7 +58,7 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest() .routingTable(false) .nodes(false); - final boolean renderDefaults = request.paramAsBoolean("defaults", false); + final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); clusterStateRequest.local(request.paramAsBoolean("local", clusterStateRequest.local())); client.admin().cluster().state(clusterStateRequest, new RestBuilderListener(channel) { @Override diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java index b7371f7b80e..e23dec0f0bc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/get/RestGetIndicesAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,9 +53,12 @@ import static org.elasticsearch.rest.RestStatus.OK; */ public class RestGetIndicesAction extends BaseRestHandler { + private final IndexScopedSettings indexScopedSettings; + @Inject - public RestGetIndicesAction(Settings settings, RestController controller, Client client) { + public RestGetIndicesAction(Settings settings, 
RestController controller, Client client, IndexScopedSettings indexScopedSettings) { super(settings, controller, client); + this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}", this); controller.registerHandler(GET, "/{index}/{type}", this); } @@ -133,9 +137,15 @@ public class RestGetIndicesAction extends BaseRestHandler { } private void writeSettings(Settings settings, XContentBuilder builder, Params params) throws IOException { + final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); builder.startObject(Fields.SETTINGS); settings.toXContent(builder, params); builder.endObject(); + if (renderDefaults) { + builder.startObject("defaults"); + indexScopedSettings.diff(settings, settings).toXContent(builder, request); + builder.endObject(); + } } }); @@ -145,7 +155,6 @@ public class RestGetIndicesAction extends BaseRestHandler { static final XContentBuilderString ALIASES = new XContentBuilderString("aliases"); static final XContentBuilderString MAPPINGS = new XContentBuilderString("mappings"); static final XContentBuilderString SETTINGS = new XContentBuilderString("settings"); - static final XContentBuilderString WARMERS = new XContentBuilderString("warmers"); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java index f27897aa731..b924acc5fb3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/settings/RestGetSettingsAction.java @@ -26,9 +26,9 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -42,9 +42,12 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestGetSettingsAction extends BaseRestHandler { + private final IndexScopedSettings indexScopedSettings; + @Inject - public RestGetSettingsAction(Settings settings, RestController controller, Client client) { + public RestGetSettingsAction(Settings settings, RestController controller, Client client, IndexScopedSettings indexScopedSettings) { super(settings, controller, client); + this.indexScopedSettings = indexScopedSettings; controller.registerHandler(GET, "/{index}/_settings/{name}", this); controller.registerHandler(GET, "/_settings/{name}", this); controller.registerHandler(GET, "/{index}/_setting/{name}", this); @@ -53,6 +56,7 @@ public class RestGetSettingsAction extends BaseRestHandler { @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { final String[] names = request.paramAsStringArrayOrEmptyIfAll("name"); + final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); GetSettingsRequest getSettingsRequest = new GetSettingsRequest() .indices(Strings.splitStringByCommaToArray(request.param("index"))) .indicesOptions(IndicesOptions.fromRequest(request, IndicesOptions.strictExpandOpen())) @@ -71,9 +75,14 @@ public class RestGetSettingsAction extends BaseRestHandler { continue; } builder.startObject(cursor.key, XContentBuilder.FieldCaseConversion.NONE); - builder.startObject(Fields.SETTINGS); + builder.startObject("settings"); cursor.value.toXContent(builder, request); builder.endObject(); + if (renderDefaults) { + builder.startObject("defaults"); + indexScopedSettings.diff(cursor.value, 
settings).toXContent(builder, request); + builder.endObject(); + } builder.endObject(); } builder.endObject(); @@ -81,8 +90,4 @@ public class RestGetSettingsAction extends BaseRestHandler { } }); } - - static class Fields { - static final XContentBuilderString SETTINGS = new XContentBuilderString("settings"); - } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index 7bbc5b9eff9..c7eab8ca14b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -266,9 +266,9 @@ public class RestNodesAction extends AbstractCatAction { table.addCell(osStats == null ? null : Short.toString(osStats.getCpu().getPercent())); boolean hasLoadAverage = osStats != null && osStats.getCpu().getLoadAverage() != null; - table.addCell(!hasLoadAverage ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[0])); - table.addCell(!hasLoadAverage ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[1])); - table.addCell(!hasLoadAverage ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[2])); + table.addCell(!hasLoadAverage || osStats.getCpu().getLoadAverage()[0] == -1 ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[0])); + table.addCell(!hasLoadAverage || osStats.getCpu().getLoadAverage()[1] == -1 ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[1])); + table.addCell(!hasLoadAverage || osStats.getCpu().getLoadAverage()[2] == -1 ? null : String.format(Locale.ROOT, "%.2f", osStats.getCpu().getLoadAverage()[2])); table.addCell(jvmStats == null ? null : jvmStats.getUptime()); table.addCell(node.clientNode() ? "c" : node.dataNode() ? "d" : "-"); table.addCell(masterId == null ? "x" : masterId.equals(node.id()) ? "*" : node.masterNode() ? 
"m" : "-"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index 34e05223657..1a37ab6da38 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -84,8 +84,6 @@ public class RestPluginsAction extends AbstractCatAction { table.addCell("name", "alias:n;desc:node name"); table.addCell("component", "alias:c;desc:component"); table.addCell("version", "alias:v;desc:component version"); - table.addCell("type", "alias:t;desc:type (j for JVM, s for Site)"); - table.addCell("url", "alias:u;desc:url for site plugins"); table.addCell("description", "alias:d;default:false;desc:plugin details"); table.endHeaders(); return table; @@ -104,22 +102,6 @@ public class RestPluginsAction extends AbstractCatAction { table.addCell(node.name()); table.addCell(pluginInfo.getName()); table.addCell(pluginInfo.getVersion()); - String type; - if (pluginInfo.isSite()) { - if (pluginInfo.isJvm()) { - type = "j/s"; - } else { - type = "s"; - } - } else { - if (pluginInfo.isJvm()) { - type = "j"; - } else { - type = ""; - } - } - table.addCell(type); - table.addCell(pluginInfo.getUrl()); table.addCell(pluginInfo.getDescription()); table.endRow(); } diff --git a/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java b/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java index 41ed875e096..a42ef54812b 100644 --- a/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java +++ b/core/src/main/java/org/elasticsearch/script/AbstractExecutableScript.java @@ -24,9 +24,4 @@ public abstract class AbstractExecutableScript implements ExecutableScript { @Override public void setNextVar(String name, Object value) { } - - @Override - public Object unwrap(Object value) { - return value; - } } \ No newline at end of file diff 
--git a/core/src/main/java/org/elasticsearch/script/ExecutableScript.java b/core/src/main/java/org/elasticsearch/script/ExecutableScript.java index 43c990201f2..bdf93acc4de 100644 --- a/core/src/main/java/org/elasticsearch/script/ExecutableScript.java +++ b/core/src/main/java/org/elasticsearch/script/ExecutableScript.java @@ -32,8 +32,12 @@ public interface ExecutableScript { Object run(); /** - * Unwraps a possible script value. For example, when passing vars and expecting the returned value to - * be part of the vars. + * Unwraps a possible script value. For example, when passing vars and + * expecting the returned value to be part of the vars. Javascript and + * Python need this but other scripting engines just return the values + * passed in. */ - Object unwrap(Object value); + default Object unwrap(Object value) { + return value; + } } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index c9e9f9a873d..9883d62c987 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -84,8 +85,7 @@ public class ScriptService extends AbstractComponent implements Closeable { static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic"; public static final String DEFAULT_SCRIPTING_LANGUAGE_SETTING = "script.default_lang"; - public static final String SCRIPT_CACHE_SIZE_SETTING = "script.cache.max_size"; - public static final int 
SCRIPT_CACHE_SIZE_DEFAULT = 100; + public static final Setting SCRIPT_CACHE_SIZE_SETTING = Setting.intSetting("script.cache.max_size", 100, 0, false, Setting.Scope.CLUSTER); public static final String SCRIPT_CACHE_EXPIRE_SETTING = "script.cache.expire"; public static final String SCRIPT_INDEX = ".scripts"; public static final String DEFAULT_LANG = "groovy"; @@ -148,7 +148,7 @@ public class ScriptService extends AbstractComponent implements Closeable { this.scriptEngines = scriptEngines; this.scriptContextRegistry = scriptContextRegistry; - int cacheMaxSize = settings.getAsInt(SCRIPT_CACHE_SIZE_SETTING, SCRIPT_CACHE_SIZE_DEFAULT); + int cacheMaxSize = SCRIPT_CACHE_SIZE_SETTING.get(settings); TimeValue cacheExpire = settings.getAsTime(SCRIPT_CACHE_EXPIRE_SETTING, null); logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire); diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 707336c2be9..9a4caebc8c3 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -19,11 +19,92 @@ package org.elasticsearch.search; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import org.apache.lucene.search.BooleanQuery; +import org.elasticsearch.common.geo.ShapesAvailability; +import org.elasticsearch.common.geo.builders.CircleBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; +import org.elasticsearch.common.geo.builders.LineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiLineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiPointBuilder; +import 
org.elasticsearch.common.geo.builders.MultiPolygonBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; +import org.elasticsearch.common.geo.builders.PolygonBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.BoolQueryParser; +import org.elasticsearch.index.query.BoostingQueryParser; +import org.elasticsearch.index.query.CommonTermsQueryParser; +import org.elasticsearch.index.query.ConstantScoreQueryParser; +import org.elasticsearch.index.query.DisMaxQueryParser; +import org.elasticsearch.index.query.EmptyQueryBuilder; +import org.elasticsearch.index.query.ExistsQueryParser; +import org.elasticsearch.index.query.FieldMaskingSpanQueryParser; +import org.elasticsearch.index.query.FuzzyQueryParser; +import org.elasticsearch.index.query.GeoBoundingBoxQueryParser; +import org.elasticsearch.index.query.GeoDistanceQueryParser; +import org.elasticsearch.index.query.GeoDistanceRangeQueryParser; +import org.elasticsearch.index.query.GeoPolygonQueryParser; +import org.elasticsearch.index.query.GeoShapeQueryParser; +import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.HasChildQueryParser; +import org.elasticsearch.index.query.HasParentQueryParser; +import org.elasticsearch.index.query.IdsQueryParser; +import org.elasticsearch.index.query.IndicesQueryParser; +import org.elasticsearch.index.query.MatchAllQueryParser; +import org.elasticsearch.index.query.MatchNoneQueryParser; +import org.elasticsearch.index.query.MatchQueryParser; +import org.elasticsearch.index.query.MoreLikeThisQueryParser; +import org.elasticsearch.index.query.MultiMatchQueryParser; +import org.elasticsearch.index.query.NestedQueryParser; +import 
org.elasticsearch.index.query.ParentIdQueryParser; +import org.elasticsearch.index.query.PrefixQueryParser; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.QueryStringQueryParser; +import org.elasticsearch.index.query.RangeQueryParser; +import org.elasticsearch.index.query.RegexpQueryParser; +import org.elasticsearch.index.query.ScriptQueryParser; +import org.elasticsearch.index.query.SimpleQueryStringParser; +import org.elasticsearch.index.query.SpanContainingQueryParser; +import org.elasticsearch.index.query.SpanFirstQueryParser; +import org.elasticsearch.index.query.SpanMultiTermQueryParser; +import org.elasticsearch.index.query.SpanNearQueryParser; +import org.elasticsearch.index.query.SpanNotQueryParser; +import org.elasticsearch.index.query.SpanOrQueryParser; +import org.elasticsearch.index.query.SpanTermQueryParser; +import org.elasticsearch.index.query.SpanWithinQueryParser; +import org.elasticsearch.index.query.TemplateQueryParser; +import org.elasticsearch.index.query.TermQueryParser; +import org.elasticsearch.index.query.TermsQueryParser; +import org.elasticsearch.index.query.TypeQueryParser; +import org.elasticsearch.index.query.WildcardQueryParser; +import org.elasticsearch.index.query.WrapperQueryParser; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryParser; +import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; +import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser; +import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser; +import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser; +import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionParser; +import 
org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionParser; +import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionParser; +import org.elasticsearch.index.query.functionscore.weight.WeightBuilder; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.action.SearchServiceTransportAction; +import org.elasticsearch.search.aggregations.AggregationBinaryParseElement; import org.elasticsearch.search.aggregations.AggregationParseElement; import org.elasticsearch.search.aggregations.AggregationPhase; import org.elasticsearch.search.aggregations.Aggregator; @@ -136,40 +217,65 @@ import org.elasticsearch.search.controller.SearchPhaseController; import org.elasticsearch.search.dfs.DfsPhase; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.FieldsParseElement; import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase; import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement; import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase; import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase; +import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase; import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement; +import org.elasticsearch.search.fetch.source.FetchSourceParseElement; import org.elasticsearch.search.fetch.source.FetchSourceSubPhase; import org.elasticsearch.search.fetch.version.VersionFetchSubPhase; import org.elasticsearch.search.highlight.HighlightPhase; import org.elasticsearch.search.highlight.Highlighter; +import org.elasticsearch.search.highlight.HighlighterParseElement; import org.elasticsearch.search.highlight.Highlighters; import 
org.elasticsearch.search.query.QueryPhase; +import org.elasticsearch.search.sort.SortParseElement; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; -import java.util.HashSet; -import java.util.Set; - /** * */ public class SearchModule extends AbstractModule { - private final Set> aggParsers = new HashSet<>(); - private final Set> pipelineAggParsers = new HashSet<>(); + private final Set aggParsers = new HashSet<>(); + private final Set pipelineAggParsers = new HashSet<>(); private final Highlighters highlighters = new Highlighters(); private final Suggesters suggesters = new Suggesters(); - private final Set> functionScoreParsers = new HashSet<>(); + /** + * Function score parsers constructed on registration. This is ok because + * they don't have any dependencies. + */ + private final Map> functionScoreParsers = new HashMap<>(); + /** + * Query parsers constructed at configure time. These have to be constructed + * at configure time because they depend on things that are registered by + * plugins (function score parsers). 
+ */ + private final List>> queryParsers = new ArrayList<>(); private final Set> fetchSubPhases = new HashSet<>(); - private final Set> heuristicParsers = new HashSet<>(); - private final Set> modelParsers = new HashSet<>(); + private final Set heuristicParsers = new HashSet<>(); + private final Set modelParsers = new HashSet<>(); + + private final Settings settings; + private final NamedWriteableRegistry namedWriteableRegistry; // pkg private so tests can mock Class searchServiceImpl = SearchService.class; + public SearchModule(Settings settings, NamedWriteableRegistry namedWriteableRegistry) { + this.settings = settings; + this.namedWriteableRegistry = namedWriteableRegistry; + + registerBuiltinFunctionScoreParsers(); + registerBuiltinQueryParsers(); + } + public void registerHighlighter(String key, Class clazz) { highlighters.registerExtension(key, clazz); } @@ -178,19 +284,32 @@ public class SearchModule extends AbstractModule { suggesters.registerExtension(key, suggester); } - public void registerFunctionScoreParser(Class parser) { - functionScoreParsers.add(parser); + /** + * Register a new ScoreFunctionParser. 
+ */ + public void registerFunctionScoreParser(ScoreFunctionParser parser) { + for (String name: parser.getNames()) { + Object oldValue = functionScoreParsers.putIfAbsent(name, parser); + if (oldValue != null) { + throw new IllegalArgumentException("Function score parser [" + oldValue + "] already registered for name [" + name + "]"); + } + } + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, parser.getBuilderPrototype()); + } + + public void registerQueryParser(Supplier> parser) { + queryParsers.add(parser); } public void registerFetchSubPhase(Class subPhase) { fetchSubPhases.add(subPhase); } - public void registerHeuristicParser(Class parser) { + public void registerHeuristicParser(SignificanceHeuristicParser parser) { heuristicParsers.add(parser); } - public void registerModelParser(Class parser) { + public void registerModelParser(MovAvgModel.AbstractModelParser parser) { modelParsers.add(parser); } @@ -199,22 +318,24 @@ public class SearchModule extends AbstractModule { * * @param parser The parser for the custom aggregator. 
*/ - public void registerAggregatorParser(Class parser) { + public void registerAggregatorParser(Aggregator.Parser parser) { aggParsers.add(parser); } - public void registerPipelineParser(Class parser) { + public void registerPipelineParser(PipelineAggregator.Parser parser) { pipelineAggParsers.add(parser); } @Override protected void configure() { + IndicesQueriesRegistry indicesQueriesRegistry = buildQueryParserRegistry(); + bind(IndicesQueriesRegistry.class).toInstance(indicesQueriesRegistry); configureSearch(); - configureAggs(); + configureAggs(indicesQueriesRegistry); configureHighlighters(); configureSuggesters(); - configureFunctionScore(); configureFetchSubPhase(); + configureShapes(); } protected void configureFetchSubPhase() { @@ -226,98 +347,98 @@ public class SearchModule extends AbstractModule { fetchSubPhaseMultibinder.addBinding().to(VersionFetchSubPhase.class); fetchSubPhaseMultibinder.addBinding().to(MatchedQueriesFetchSubPhase.class); fetchSubPhaseMultibinder.addBinding().to(HighlightPhase.class); + fetchSubPhaseMultibinder.addBinding().to(ParentFieldSubFetchPhase.class); for (Class clazz : fetchSubPhases) { fetchSubPhaseMultibinder.addBinding().to(clazz); } bind(InnerHitsFetchSubPhase.class).asEagerSingleton(); } - protected void configureSuggesters() { - suggesters.bind(binder()); + public IndicesQueriesRegistry buildQueryParserRegistry() { + Map> queryParsersMap = new HashMap<>(); + for (Supplier> parserSupplier : queryParsers) { + QueryParser parser = parserSupplier.get(); + for (String name: parser.names()) { + Object oldValue = queryParsersMap.putIfAbsent(name, parser); + if (oldValue != null) { + throw new IllegalArgumentException("Query parser [" + oldValue + "] already registered for name [" + name + "] while trying to register [" + parser + "]"); + } + } + namedWriteableRegistry.registerPrototype(QueryBuilder.class, parser.getBuilderPrototype()); + } + return new IndicesQueriesRegistry(settings, queryParsersMap); } - protected void 
configureFunctionScore() { - Multibinder parserMapBinder = Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); - for (Class clazz : functionScoreParsers) { - parserMapBinder.addBinding().to(clazz); - } - bind(ScoreFunctionParserMapper.class).asEagerSingleton(); + protected void configureSuggesters() { + suggesters.bind(binder()); } protected void configureHighlighters() { highlighters.bind(binder()); } - protected void configureAggs() { - Multibinder multibinderAggParser = Multibinder.newSetBinder(binder(), Aggregator.Parser.class); - multibinderAggParser.addBinding().to(AvgParser.class); - multibinderAggParser.addBinding().to(SumParser.class); - multibinderAggParser.addBinding().to(MinParser.class); - multibinderAggParser.addBinding().to(MaxParser.class); - multibinderAggParser.addBinding().to(StatsParser.class); - multibinderAggParser.addBinding().to(ExtendedStatsParser.class); - multibinderAggParser.addBinding().to(ValueCountParser.class); - multibinderAggParser.addBinding().to(PercentilesParser.class); - multibinderAggParser.addBinding().to(PercentileRanksParser.class); - multibinderAggParser.addBinding().to(CardinalityParser.class); - multibinderAggParser.addBinding().to(GlobalParser.class); - multibinderAggParser.addBinding().to(MissingParser.class); - multibinderAggParser.addBinding().to(FilterParser.class); - multibinderAggParser.addBinding().to(FiltersParser.class); - multibinderAggParser.addBinding().to(SamplerParser.class); - multibinderAggParser.addBinding().to(DiversifiedSamplerParser.class); - multibinderAggParser.addBinding().to(TermsParser.class); - multibinderAggParser.addBinding().to(SignificantTermsParser.class); - multibinderAggParser.addBinding().to(RangeParser.class); - multibinderAggParser.addBinding().to(DateRangeParser.class); - multibinderAggParser.addBinding().to(IpRangeParser.class); - multibinderAggParser.addBinding().to(HistogramParser.class); - multibinderAggParser.addBinding().to(DateHistogramParser.class); - 
multibinderAggParser.addBinding().to(GeoDistanceParser.class); - multibinderAggParser.addBinding().to(GeoHashGridParser.class); - multibinderAggParser.addBinding().to(NestedParser.class); - multibinderAggParser.addBinding().to(ReverseNestedParser.class); - multibinderAggParser.addBinding().to(TopHitsParser.class); - multibinderAggParser.addBinding().to(GeoBoundsParser.class); - multibinderAggParser.addBinding().to(GeoCentroidParser.class); - multibinderAggParser.addBinding().to(ScriptedMetricParser.class); - multibinderAggParser.addBinding().to(ChildrenParser.class); - for (Class parser : aggParsers) { - multibinderAggParser.addBinding().to(parser); - } + protected void configureAggs(IndicesQueriesRegistry indicesQueriesRegistry) { - Multibinder multibinderPipelineAggParser = Multibinder.newSetBinder(binder(), PipelineAggregator.Parser.class); - multibinderPipelineAggParser.addBinding().to(DerivativeParser.class); - multibinderPipelineAggParser.addBinding().to(MaxBucketParser.class); - multibinderPipelineAggParser.addBinding().to(MinBucketParser.class); - multibinderPipelineAggParser.addBinding().to(AvgBucketParser.class); - multibinderPipelineAggParser.addBinding().to(SumBucketParser.class); - multibinderPipelineAggParser.addBinding().to(StatsBucketParser.class); - multibinderPipelineAggParser.addBinding().to(ExtendedStatsBucketParser.class); - multibinderPipelineAggParser.addBinding().to(PercentilesBucketParser.class); - multibinderPipelineAggParser.addBinding().to(MovAvgParser.class); - multibinderPipelineAggParser.addBinding().to(CumulativeSumParser.class); - multibinderPipelineAggParser.addBinding().to(BucketScriptParser.class); - multibinderPipelineAggParser.addBinding().to(BucketSelectorParser.class); - multibinderPipelineAggParser.addBinding().to(SerialDiffParser.class); - for (Class parser : pipelineAggParsers) { - multibinderPipelineAggParser.addBinding().to(parser); - } - bind(AggregatorParsers.class).asEagerSingleton(); - 
bind(AggregationParseElement.class).asEagerSingleton(); - bind(AggregationPhase.class).asEagerSingleton(); + MovAvgModelParserMapper movAvgModelParserMapper = new MovAvgModelParserMapper(modelParsers); - Multibinder heuristicParserMultibinder = Multibinder.newSetBinder(binder(), SignificanceHeuristicParser.class); - for (Class clazz : heuristicParsers) { - heuristicParserMultibinder.addBinding().to(clazz); - } - bind(SignificanceHeuristicParserMapper.class); + SignificanceHeuristicParserMapper significanceHeuristicParserMapper = new SignificanceHeuristicParserMapper(heuristicParsers); - Multibinder modelParserMultibinder = Multibinder.newSetBinder(binder(), MovAvgModel.AbstractModelParser.class); - for (Class clazz : modelParsers) { - modelParserMultibinder.addBinding().to(clazz); - } - bind(MovAvgModelParserMapper.class); + registerAggregatorParser(new AvgParser()); + registerAggregatorParser(new SumParser()); + registerAggregatorParser(new MinParser()); + registerAggregatorParser(new MaxParser()); + registerAggregatorParser(new StatsParser()); + registerAggregatorParser(new ExtendedStatsParser()); + registerAggregatorParser(new ValueCountParser()); + registerAggregatorParser(new PercentilesParser()); + registerAggregatorParser(new PercentileRanksParser()); + registerAggregatorParser(new CardinalityParser()); + registerAggregatorParser(new GlobalParser()); + registerAggregatorParser(new MissingParser()); + registerAggregatorParser(new FilterParser()); + registerAggregatorParser(new FiltersParser(indicesQueriesRegistry)); + registerAggregatorParser(new SamplerParser()); + registerAggregatorParser(new DiversifiedSamplerParser()); + registerAggregatorParser(new TermsParser()); + registerAggregatorParser(new SignificantTermsParser(significanceHeuristicParserMapper, indicesQueriesRegistry)); + registerAggregatorParser(new RangeParser()); + registerAggregatorParser(new DateRangeParser()); + registerAggregatorParser(new IpRangeParser()); + registerAggregatorParser(new 
HistogramParser()); + registerAggregatorParser(new DateHistogramParser()); + registerAggregatorParser(new GeoDistanceParser()); + registerAggregatorParser(new GeoHashGridParser()); + registerAggregatorParser(new NestedParser()); + registerAggregatorParser(new ReverseNestedParser()); + registerAggregatorParser(new TopHitsParser(new SortParseElement(), new FetchSourceParseElement(), new HighlighterParseElement(), + new FieldDataFieldsParseElement(), new ScriptFieldsParseElement(), new FieldsParseElement())); + registerAggregatorParser(new GeoBoundsParser()); + registerAggregatorParser(new GeoCentroidParser()); + registerAggregatorParser(new ScriptedMetricParser()); + registerAggregatorParser(new ChildrenParser()); + + registerPipelineParser(new DerivativeParser()); + registerPipelineParser(new MaxBucketParser()); + registerPipelineParser(new MinBucketParser()); + registerPipelineParser(new AvgBucketParser()); + registerPipelineParser(new SumBucketParser()); + registerPipelineParser(new StatsBucketParser()); + registerPipelineParser(new ExtendedStatsBucketParser()); + registerPipelineParser(new PercentilesBucketParser()); + registerPipelineParser(new MovAvgParser(movAvgModelParserMapper)); + registerPipelineParser(new CumulativeSumParser()); + registerPipelineParser(new BucketScriptParser()); + registerPipelineParser(new BucketSelectorParser()); + registerPipelineParser(new SerialDiffParser()); + + AggregatorParsers aggregatorParsers = new AggregatorParsers(aggParsers, pipelineAggParsers, namedWriteableRegistry); + AggregationParseElement aggParseElement = new AggregationParseElement(aggregatorParsers, indicesQueriesRegistry); + AggregationBinaryParseElement aggBinaryParseElement = new AggregationBinaryParseElement(aggregatorParsers, indicesQueriesRegistry); + AggregationPhase aggPhase = new AggregationPhase(aggParseElement, aggBinaryParseElement); + bind(AggregatorParsers.class).toInstance(aggregatorParsers); + ; + 
bind(AggregationParseElement.class).toInstance(aggParseElement); + bind(AggregationPhase.class).toInstance(aggPhase); } protected void configureSearch() { @@ -334,6 +455,88 @@ public class SearchModule extends AbstractModule { } } + private void configureShapes() { + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, CircleBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, EnvelopeBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPointBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, LineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiLineStringBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, PolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, MultiPolygonBuilder.PROTOTYPE); + namedWriteableRegistry.registerPrototype(ShapeBuilder.class, GeometryCollectionBuilder.PROTOTYPE); + } + } + + private void registerBuiltinFunctionScoreParsers() { + registerFunctionScoreParser(new ScriptScoreFunctionParser()); + registerFunctionScoreParser(new GaussDecayFunctionParser()); + registerFunctionScoreParser(new LinearDecayFunctionParser()); + registerFunctionScoreParser(new ExponentialDecayFunctionParser()); + registerFunctionScoreParser(new RandomScoreFunctionParser()); + registerFunctionScoreParser(new FieldValueFactorFunctionParser()); + //weight doesn't have its own parser, so every function supports it out of the box. + //Can be a single function too when not associated to any other function, which is why it needs to be registered manually here. 
+ namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, new WeightBuilder()); + } + + private void registerBuiltinQueryParsers() { + registerQueryParser(MatchQueryParser::new); + registerQueryParser(MultiMatchQueryParser::new); + registerQueryParser(NestedQueryParser::new); + registerQueryParser(HasChildQueryParser::new); + registerQueryParser(HasParentQueryParser::new); + registerQueryParser(DisMaxQueryParser::new); + registerQueryParser(IdsQueryParser::new); + registerQueryParser(MatchAllQueryParser::new); + registerQueryParser(QueryStringQueryParser::new); + registerQueryParser(BoostingQueryParser::new); + BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count", settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount()))); + registerQueryParser(BoolQueryParser::new); + registerQueryParser(TermQueryParser::new); + registerQueryParser(TermsQueryParser::new); + registerQueryParser(FuzzyQueryParser::new); + registerQueryParser(RegexpQueryParser::new); + registerQueryParser(RangeQueryParser::new); + registerQueryParser(PrefixQueryParser::new); + registerQueryParser(WildcardQueryParser::new); + registerQueryParser(ConstantScoreQueryParser::new); + registerQueryParser(SpanTermQueryParser::new); + registerQueryParser(SpanNotQueryParser::new); + registerQueryParser(SpanWithinQueryParser::new); + registerQueryParser(SpanContainingQueryParser::new); + registerQueryParser(FieldMaskingSpanQueryParser::new); + registerQueryParser(SpanFirstQueryParser::new); + registerQueryParser(SpanNearQueryParser::new); + registerQueryParser(SpanOrQueryParser::new); + registerQueryParser(MoreLikeThisQueryParser::new); + registerQueryParser(WrapperQueryParser::new); + registerQueryParser(IndicesQueryParser::new); + registerQueryParser(CommonTermsQueryParser::new); + registerQueryParser(SpanMultiTermQueryParser::new); + // This is delayed until configure time to give plugins a chance to register parsers + 
registerQueryParser(() -> new FunctionScoreQueryParser(new ScoreFunctionParserMapper(functionScoreParsers))); + registerQueryParser(SimpleQueryStringParser::new); + registerQueryParser(TemplateQueryParser::new); + registerQueryParser(TypeQueryParser::new); + registerQueryParser(ScriptQueryParser::new); + registerQueryParser(GeoDistanceQueryParser::new); + registerQueryParser(GeoDistanceRangeQueryParser::new); + registerQueryParser(GeoBoundingBoxQueryParser::new); + registerQueryParser(GeohashCellQuery.Parser::new); + registerQueryParser(GeoPolygonQueryParser::new); + registerQueryParser(ExistsQueryParser::new); + registerQueryParser(MatchNoneQueryParser::new); + registerQueryParser(ParentIdQueryParser::new); + if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { + registerQueryParser(GeoShapeQueryParser::new); + } + // EmptyQueryBuilder is not registered as query parser but used internally. + // We need to register it with the NamedWriteableRegistry in order to serialize it + namedWriteableRegistry.registerPrototype(QueryBuilder.class, EmptyQueryBuilder.PROTOTYPE); + } + static { // calcs InternalAvg.registerStreams(); @@ -395,5 +598,4 @@ public class SearchModule extends AbstractModule { BucketSelectorPipelineAggregator.registerStreams(); SerialDiffPipelineAggregator.registerStreams(); } - } diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index a8efc0b14f2..a91014dea34 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -122,9 +122,10 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; */ public class SearchService extends AbstractLifecycleComponent implements IndexEventListener { - public static final String NORMS_LOADING_KEY = "index.norms.loading"; - public static final String DEFAULT_KEEPALIVE_KEY = 
"search.default_keep_alive"; - public static final String KEEPALIVE_INTERVAL_KEY = "search.keep_alive_interval"; + public static final Setting INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading", Loading.LAZY.toString(), (s) -> Loading.parse(s, Loading.LAZY), false, Setting.Scope.INDEX); + // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes + public static final Setting DEFAULT_KEEPALIVE_SETTING = Setting.positiveTimeSetting("search.default_keep_alive", timeValueMinutes(5), false, Setting.Scope.CLUSTER); + public static final Setting KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), false, Setting.Scope.CLUSTER); public static final TimeValue NO_TIMEOUT = timeValueMillis(-1); public static final Setting DEFAULT_SEARCH_TIMEOUT_SETTING = Setting.timeSetting("search.default_search_timeout", NO_TIMEOUT, true, Setting.Scope.CLUSTER); @@ -184,9 +185,8 @@ public class SearchService extends AbstractLifecycleComponent imp this.fetchPhase = fetchPhase; this.indicesQueryCache = indicesQueryCache; - TimeValue keepAliveInterval = settings.getAsTime(KEEPALIVE_INTERVAL_KEY, timeValueMinutes(1)); - // we can have 5 minutes here, since we make sure to clean with search requests and when shard/index closes - this.defaultKeepAlive = settings.getAsTime(DEFAULT_KEEPALIVE_KEY, timeValueMinutes(5)).millis(); + TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings); + this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis(); Map elementParsers = new HashMap<>(); elementParsers.putAll(dfsPhase.parseElements()); @@ -966,7 +966,7 @@ public class SearchService extends AbstractLifecycleComponent imp } @Override public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) { - final Loading defaultLoading = Loading.parse(indexShard.getIndexSettings().getSettings().get(NORMS_LOADING_KEY), 
Loading.LAZY); + final Loading defaultLoading = indexShard.indexSettings().getValue(INDEX_NORMS_LOADING_SETTING); final MapperService mapperService = indexShard.mapperService(); final ObjectSet warmUp = new ObjectHashSet<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { @@ -1037,22 +1037,8 @@ public class SearchService extends AbstractLifecycleComponent imp final Map warmUp = new HashMap<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final FieldDataType fieldDataType; - final String indexName; - if (fieldMapper instanceof ParentFieldMapper) { - MappedFieldType joinFieldType = ((ParentFieldMapper) fieldMapper).getChildJoinFieldType(); - if (joinFieldType == null) { - continue; - } - fieldDataType = joinFieldType.fieldDataType(); - // TODO: this can be removed in 3.0 when the old parent/child impl is removed: - // related to: https://github.com/elastic/elasticsearch/pull/12418 - indexName = fieldMapper.fieldType().name(); - } else { - fieldDataType = fieldMapper.fieldType().fieldDataType(); - indexName = fieldMapper.fieldType().name(); - } - + final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType(); + final String indexName = fieldMapper.fieldType().name(); if (fieldDataType == null) { continue; } @@ -1105,21 +1091,8 @@ public class SearchService extends AbstractLifecycleComponent imp final Map warmUpGlobalOrdinals = new HashMap<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final FieldDataType fieldDataType; - final String indexName; - if (fieldMapper instanceof ParentFieldMapper) { - MappedFieldType joinFieldType = ((ParentFieldMapper) fieldMapper).getChildJoinFieldType(); - if (joinFieldType == null) { - continue; - } - fieldDataType = joinFieldType.fieldDataType(); - // TODO: this can be removed in 3.0 when the old parent/child impl is removed: - // related to: 
https://github.com/elastic/elasticsearch/pull/12418 - indexName = fieldMapper.fieldType().name(); - } else { - fieldDataType = fieldMapper.fieldType().fieldDataType(); - indexName = fieldMapper.fieldType().name(); - } + final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType(); + final String indexName = fieldMapper.fieldType().name(); if (fieldDataType == null) { continue; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java index 07ba8a9cd15..7e70ffa7b4e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.filter; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -50,13 +51,13 @@ public class FilterAggregator extends SingleBucketAggregator { private final Weight filter; public FilterAggregator(String name, - Query filter, + Weight filter, AggregatorFactories factories, AggregationContext aggregationContext, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); - this.filter = aggregationContext.searchContext().searcher().createNormalizedWeight(filter, false); + this.filter = filter; } @Override @@ -102,11 +103,23 @@ public class FilterAggregator extends SingleBucketAggregator { this.filter = filter; } + // TODO: refactor in order to initialize the factory once with its parent, + // the context, etc. 
and then have a no-arg lightweight create method + // (since create may be called thousands of times) + + private IndexSearcher searcher; + private Weight weight; + @Override public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - Query filter = this.filter.toQuery(context.searchContext().indexShard().getQueryShardContext()); - return new FilterAggregator(name, filter, factories, context, parent, pipelineAggregators, metaData); + IndexSearcher contextSearcher = context.searchContext().searcher(); + if (searcher != contextSearcher) { + searcher = contextSearcher; + Query filter = this.filter.toQuery(context.searchContext().indexShard().getQueryShardContext()); + weight = contextSearcher.createNormalizedWeight(filter, false); + } + return new FilterAggregator(name, weight, factories, context, parent, pipelineAggregators, metaData); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java index 51ad3ae9366..955802ec2ff 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.filters; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -87,7 +88,7 @@ public class FiltersAggregator extends BucketsAggregator { String key = in.readString(); QueryBuilder filter = in.readQuery(); return new KeyedFilter(key, filter); - } + } @Override public void writeTo(StreamOutput out) throws IOException { @@ -115,32 +116,26 @@ 
public class FiltersAggregator extends BucketsAggregator { } private final String[] keys; - private final Weight[] filters; + private Weight[] filters; private final boolean keyed; private final boolean showOtherBucket; private final String otherBucketKey; private final int totalNumKeys; - public FiltersAggregator(String name, AggregatorFactories factories, List filters, boolean keyed, String otherBucketKey, + public FiltersAggregator(String name, AggregatorFactories factories, String[] keys, Weight[] filters, boolean keyed, String otherBucketKey, AggregationContext aggregationContext, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); this.keyed = keyed; - this.keys = new String[filters.size()]; - this.filters = new Weight[filters.size()]; + this.keys = keys; + this.filters = filters; this.showOtherBucket = otherBucketKey != null; this.otherBucketKey = otherBucketKey; if (showOtherBucket) { - this.totalNumKeys = filters.size() + 1; + this.totalNumKeys = keys.length + 1; } else { - this.totalNumKeys = filters.size(); - } - for (int i = 0; i < filters.size(); ++i) { - KeyedFilter keyedFilter = filters.get(i); - this.keys[i] = keyedFilter.key; - Query filter = keyedFilter.filter.toFilter(context.searchContext().indexShard().getQueryShardContext()); - this.filters[i] = aggregationContext.searchContext().searcher().createNormalizedWeight(filter, false); + this.totalNumKeys = keys.length; } } @@ -269,10 +264,30 @@ public class FiltersAggregator extends BucketsAggregator { return otherBucketKey; } + // TODO: refactor in order to initialize the factory once with its parent, + // the context, etc. 
and then have a no-arg lightweight create method + // (since create may be called thousands of times) + + private IndexSearcher searcher; + private String[] keys; + private Weight[] weights; + @Override public Aggregator createInternal(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) throws IOException { - return new FiltersAggregator(name, factories, filters, keyed, otherBucket ? otherBucketKey : null, context, parent, + IndexSearcher contextSearcher = context.searchContext().searcher(); + if (searcher != contextSearcher) { + searcher = contextSearcher; + weights = new Weight[filters.size()]; + keys = new String[filters.size()]; + for (int i = 0; i < filters.size(); ++i) { + KeyedFilter keyedFilter = filters.get(i); + this.keys[i] = keyedFilter.key; + Query filter = keyedFilter.filter.toFilter(context.searchContext().indexShard().getQueryShardContext()); + this.weights[i] = contextSearcher.createNormalizedWeight(filter, false); + } + } + return new FiltersAggregator(name, factories, keys, weights, keyed, otherBucket ? 
otherBucketKey : null, context, parent, pipelineAggregators, metaData); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index 9dd3d0796b0..2956a2d4a7a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -156,10 +156,8 @@ public class ScriptHeuristic extends SignificanceHeuristic { } public static class ScriptHeuristicParser implements SignificanceHeuristicParser { - private final ScriptService scriptService; - public ScriptHeuristicParser(ScriptService scriptService) { - this.scriptService = scriptService; + public ScriptHeuristicParser() { } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParserMapper.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParserMapper.java index 07e74ec6f91..a9b385c2ada 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParserMapper.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/SignificanceHeuristicParserMapper.java @@ -21,8 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.script.ScriptService; - import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -33,14 +31,14 @@ public class SignificanceHeuristicParserMapper { protected final Map significanceHeuristicParsers; @Inject - public SignificanceHeuristicParserMapper(Set parsers, ScriptService scriptService) { + public 
SignificanceHeuristicParserMapper(Set parsers) { Map map = new HashMap<>(); add(map, new JLHScore.JLHScoreParser()); add(map, new PercentageScore.PercentageScoreParser()); add(map, new MutualInformation.MutualInformationParser()); add(map, new ChiSquare.ChiSquareParser()); add(map, new GND.GNDParser()); - add(map, new ScriptHeuristic.ScriptHeuristicParser(scriptService)); + add(map, new ScriptHeuristic.ScriptHeuristicParser()); for (SignificanceHeuristicParser parser : parsers) { add(map, parser); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index 125563cd098..035df6f063a 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.DocValuesTermsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -284,20 +285,18 @@ public final class InnerHitsContext { @Override public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException { - final String field; - final String term; + final Query hitQuery; if (isParentHit(hitContext.hit())) { - field = ParentFieldMapper.NAME; - term = Uid.createUid(hitContext.hit().type(), hitContext.hit().id()); + String field = ParentFieldMapper.joinField(hitContext.hit().type()); + hitQuery = new DocValuesTermsQuery(field, hitContext.hit().id()); } else if (isChildHit(hitContext.hit())) { DocumentMapper hitDocumentMapper = mapperService.documentMapper(hitContext.hit().type()); final String parentType = 
hitDocumentMapper.parentFieldMapper().type(); - field = UidFieldMapper.NAME; SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME); if (parentField == null) { throw new IllegalStateException("All children must have a _parent"); } - term = Uid.createUid(parentType, (String) parentField.getValue()); + hitQuery = new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUid(parentType, parentField.getValue()))); } else { return Lucene.EMPTY_TOP_DOCS; } @@ -305,9 +304,9 @@ public final class InnerHitsContext { BooleanQuery q = new BooleanQuery.Builder() .add(query.query(), Occur.MUST) // Only include docs that have the current hit as parent - .add(new TermQuery(new Term(field, term)), Occur.MUST) + .add(hitQuery, Occur.FILTER) // Only include docs that have this inner hits type - .add(documentMapper.typeFilter(), Occur.MUST) + .add(documentMapper.typeFilter(), Occur.FILTER) .build(); if (size() == 0) { final int count = context.searcher().count(q); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java new file mode 100644 index 00000000000..41fe7176424 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/fetch/parent/ParentFieldSubFetchPhase.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.fetch.parent; + +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.search.SearchParseElement; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; +import org.elasticsearch.search.internal.InternalSearchHit; +import org.elasticsearch.search.internal.InternalSearchHitField; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class ParentFieldSubFetchPhase implements FetchSubPhase { + + @Override + public Map parseElements() { + return Collections.emptyMap(); + } + + @Override + public boolean hitExecutionNeeded(SearchContext context) { + return true; + } + + @Override + public void hitExecute(SearchContext context, HitContext hitContext) { + ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).parentFieldMapper(); + if (parentFieldMapper.active() == false) { + return; + } + + String parentId = getParentId(parentFieldMapper, hitContext.reader(), hitContext.docId()); + Map fields = hitContext.hit().fieldsOrNull(); + if (fields == null) { + fields = new HashMap<>(); + hitContext.hit().fields(fields); + } 
+ fields.put(ParentFieldMapper.NAME, new InternalSearchHitField(ParentFieldMapper.NAME, Collections.singletonList(parentId))); + } + + @Override + public boolean hitsExecutionNeeded(SearchContext context) { + return false; + } + + @Override + public void hitsExecute(SearchContext context, InternalSearchHit[] hits) { + } + + public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) { + try { + SortedDocValues docValues = reader.getSortedDocValues(fieldMapper.name()); + BytesRef parentId = docValues.get(docId); + assert parentId.length > 0; + return parentId.utf8ToString(); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); + } + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 18a68875e8b..6159f678fd1 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.internal; +import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -26,6 +27,7 @@ import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cache.recycler.PageCacheRecycler; @@ -38,12 +40,14 @@ import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; 
import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.ParsedQuery; @@ -79,20 +83,6 @@ import java.util.Map; * */ public class DefaultSearchContext extends SearchContext { - /** - * Index setting describing the maximum value of from + size on a query. - */ - public static final String MAX_RESULT_WINDOW = "index.max_result_window"; - public static class Defaults { - /** - * Default maximum value of from + size on a query. 10,000 was chosen as - * a conservative default as it is sure to not cause trouble. Users can - * certainly profile their cluster and decide to set it to 100,000 - * safely. 1,000,000 is probably way to high for any cluster to set - * safely. - */ - public static final int MAX_RESULT_WINDOW = 10000; - } private final long id; private final ShardSearchRequest request; @@ -203,13 +193,13 @@ public class DefaultSearchContext extends SearchContext { long resultWindow = from + size; // We need settingsService's view of the settings because its dynamic. // indexService's isn't. - int maxResultWindow = indexService.getIndexSettings().getSettings().getAsInt(MAX_RESULT_WINDOW, Defaults.MAX_RESULT_WINDOW); + int maxResultWindow = indexService.getIndexSettings().getMaxResultWindow(); if (resultWindow > maxResultWindow) { throw new QueryPhaseExecutionException(this, "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow + "]. See the scroll api for a more efficient way to request large data sets. 
" - + "This limit can be set by changing the [" + DefaultSearchContext.MAX_RESULT_WINDOW + + "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey() + "] index level parameter."); } } @@ -252,19 +242,37 @@ public class DefaultSearchContext extends SearchContext { } @Override + @Nullable public Query searchFilter(String[] types) { - Query filter = mapperService().searchFilter(types); - if (filter == null && aliasFilter == null) { + return createSearchFilter(types, aliasFilter, mapperService().hasNested()); + } + + // extracted to static helper method to make writing unit tests easier: + static Query createSearchFilter(String[] types, Query aliasFilter, boolean hasNestedFields) { + Query typesFilter = null; + if (types != null && types.length >= 1) { + BytesRef[] typesBytes = new BytesRef[types.length]; + for (int i = 0; i < typesBytes.length; i++) { + typesBytes[i] = new BytesRef(types[i]); + } + typesFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes); + } + + if (typesFilter == null && aliasFilter == null && hasNestedFields == false) { return null; } + BooleanQuery.Builder bq = new BooleanQuery.Builder(); - if (filter != null) { - bq.add(filter, Occur.MUST); + if (typesFilter != null) { + bq.add(typesFilter, Occur.FILTER); + } else if (hasNestedFields) { + bq.add(Queries.newNonNestedFilter(), Occur.FILTER); } if (aliasFilter != null) { - bq.add(aliasFilter, Occur.MUST); + bq.add(aliasFilter, Occur.FILTER); } - return new ConstantScoreQuery(bq.build()); + + return bq.build(); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java index cb2bca28fb3..001924d1bdf 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortOrder.java @@ -19,12 +19,19 @@ package org.elasticsearch.search.sort; +import org.elasticsearch.common.io.stream.StreamInput; +import 
org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Locale; + /** * A sorting order. * * */ -public enum SortOrder { +public enum SortOrder implements Writeable { /** * Ascending order. */ @@ -42,5 +49,30 @@ public enum SortOrder { public String toString() { return "desc"; } + }; + + public static final SortOrder DEFAULT = DESC; + private static final SortOrder PROTOTYPE = DEFAULT; + + @Override + public SortOrder readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown SortOrder ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + public static SortOrder readOrderFrom(StreamInput in) throws IOException { + return PROTOTYPE.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.ordinal()); + } + + public static SortOrder fromString(String op) { + return valueOf(op.toUpperCase(Locale.ROOT)); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java index 62689e65832..2509f792ecc 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/SuggestUtils.java @@ -210,15 +210,20 @@ public final class SuggestUtils { public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len"); public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq"); public static final ParseField SHARD_SIZE = new ParseField("shard_size"); + public static final ParseField ANALYZER = new ParseField("analyzer"); + public static final ParseField FIELD = new ParseField("field"); + public static final ParseField SIZE = new ParseField("size"); + public static final ParseField SORT = new 
ParseField("sort"); + public static final ParseField ACCURACY = new ParseField("accuracy"); } public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName, DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { - if ("accuracy".equals(fieldName)) { + if (parseFieldMatcher.match(fieldName, Fields.ACCURACY)) { suggestion.accuracy(parser.floatValue()); } else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) { suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text())); - } else if ("sort".equals(fieldName)) { + } else if (parseFieldMatcher.match(fieldName, Fields.SORT)) { suggestion.sort(SuggestUtils.resolveSort(parser.text())); } else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) { suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text())); @@ -246,16 +251,16 @@ public final class SuggestUtils { public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName, SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { - if ("analyzer".equals(fieldName)) { + if (parseFieldMatcher.match(fieldName, Fields.ANALYZER)) { String analyzerName = parser.text(); Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName); if (analyzer == null) { throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); } suggestion.setAnalyzer(analyzer); - } else if ("field".equals(fieldName)) { + } else if (parseFieldMatcher.match(fieldName, Fields.FIELD)) { suggestion.setField(parser.text()); - } else if ("size".equals(fieldName)) { + } else if (parseFieldMatcher.match(fieldName, Fields.SIZE)) { suggestion.setSize(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.SHARD_SIZE)) { suggestion.setShardSize(parser.intValue()); diff --git 
a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index a2e5f743c59..887abccca46 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.HasContextAndHeaders; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ObjectParser; @@ -34,7 +33,10 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.index.query.RegexpFlag; import org.elasticsearch.search.suggest.SuggestContextParser; +import org.elasticsearch.search.suggest.SuggestUtils.Fields; import org.elasticsearch.search.suggest.SuggestionSearchContext; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder; +import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.RegexOptionsBuilder; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; @@ -74,29 +76,29 @@ import java.util.Map; */ public class CompletionSuggestParser implements SuggestContextParser { - private static ObjectParser TLP_PARSER = new ObjectParser<>("completion", null); - private static ObjectParser REGEXP_PARSER = new ObjectParser<>("regexp", CompletionSuggestionBuilder.RegexOptionsBuilder::new); - private static ObjectParser FUZZY_PARSER = new ObjectParser<>("fuzzy", 
CompletionSuggestionBuilder.FuzzyOptionsBuilder::new); + private static ObjectParser TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null); + private static ObjectParser REGEXP_PARSER = new ObjectParser<>(RegexOptionsBuilder.REGEX_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.RegexOptionsBuilder::new); + private static ObjectParser FUZZY_PARSER = new ObjectParser<>(FuzzyOptionsBuilder.FUZZY_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.FuzzyOptionsBuilder::new); static { - FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyMinLength, new ParseField("min_length")); - FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setMaxDeterminizedStates, new ParseField("max_determinized_states")); - FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setUnicodeAware, new ParseField("unicode_aware")); - FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyPrefixLength, new ParseField("prefix_length")); - FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setTranspositions, new ParseField("transpositions")); + FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyMinLength, FuzzyOptionsBuilder.MIN_LENGTH_FIELD); + FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setMaxDeterminizedStates, FuzzyOptionsBuilder.MAX_DETERMINIZED_STATES_FIELD); + FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setUnicodeAware, FuzzyOptionsBuilder.UNICODE_AWARE_FIELD); + FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyPrefixLength, FuzzyOptionsBuilder.PREFIX_LENGTH_FIELD); + FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setTranspositions, FuzzyOptionsBuilder.TRANSPOSITION_FIELD); FUZZY_PARSER.declareValue((a, b) -> { try { a.setFuzziness(Fuzziness.parse(b).asDistance()); } catch 
(IOException e) { throw new ElasticsearchException(e); } - }, new ParseField("fuzziness")); - REGEXP_PARSER.declareInt(CompletionSuggestionBuilder.RegexOptionsBuilder::setMaxDeterminizedStates, new ParseField("max_determinized_states")); - REGEXP_PARSER.declareStringOrNull(CompletionSuggestionBuilder.RegexOptionsBuilder::setFlags, new ParseField("flags")); + }, Fuzziness.FIELD); + REGEXP_PARSER.declareInt(CompletionSuggestionBuilder.RegexOptionsBuilder::setMaxDeterminizedStates, RegexOptionsBuilder.MAX_DETERMINIZED_STATES); + REGEXP_PARSER.declareStringOrNull(CompletionSuggestionBuilder.RegexOptionsBuilder::setFlags, RegexOptionsBuilder.FLAGS_VALUE); - TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, new ParseField("payload")); - TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, CompletionSuggestionBuilder.FuzzyOptionsBuilder::new, new ParseField("fuzzy")); - TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, new ParseField("regexp")); - TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, new ParseField("field")); + TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD); + TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, CompletionSuggestionBuilder.FuzzyOptionsBuilder::new, FuzzyOptionsBuilder.FUZZY_OPTIONS); + TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, RegexOptionsBuilder.REGEX_OPTIONS); + TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD); TLP_PARSER.declareField((p, v, c) -> { String analyzerName = p.text(); Analyzer analyzer = c.mapperService.analysisService().analyzer(analyzerName); @@ -104,10 +106,9 @@ public class CompletionSuggestParser implements SuggestContextParser { throw new 
IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); } v.setAnalyzer(analyzer); - }, new ParseField("analyzer"), ObjectParser.ValueType.STRING); - TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, new ParseField("analyzer")); - TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, new ParseField("size")); - TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, new ParseField("size")); + }, Fields.ANALYZER, ObjectParser.ValueType.STRING); + TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, Fields.SIZE); + TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, Fields.SHARD_SIZE); TLP_PARSER.declareField((p, v, c) -> { // Copy the current structure. We will parse, once the mapping is provided XContentBuilder builder = XContentFactory.contentBuilder(p.contentType()); @@ -115,7 +116,7 @@ public class CompletionSuggestParser implements SuggestContextParser { BytesReference bytes = builder.bytes(); c.contextParser = XContentFactory.xContent(bytes).createParser(bytes); p.skipChildren(); - }, new ParseField("contexts", "context"), ObjectParser.ValueType.OBJECT); // context is deprecated + }, CompletionSuggestionBuilder.CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated } private static class ContextAndSuggest { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 100e701c03c..9cf78ea6677 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; import 
org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -45,19 +46,30 @@ import java.util.Set; * indexing. */ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilder { + + final static String SUGGESTION_NAME = "completion"; + static final ParseField PAYLOAD_FIELD = new ParseField("payload"); + static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); private FuzzyOptionsBuilder fuzzyOptionsBuilder; private RegexOptionsBuilder regexOptionsBuilder; private final Map> queryContexts = new HashMap<>(); private final Set payloadFields = new HashSet<>(); public CompletionSuggestionBuilder(String name) { - super(name, "completion"); + super(name, SUGGESTION_NAME); } /** * Options for fuzzy queries */ public static class FuzzyOptionsBuilder implements ToXContent { + static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy"); + static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions"); + static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length"); + static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length"); + static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware"); + static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states"); + private int editDistance = FuzzyCompletionQuery.DEFAULT_MAX_EDITS; private boolean transpositions = FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS; private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH; @@ -179,13 +191,13 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - 
builder.startObject("fuzzy"); + builder.startObject(FUZZY_OPTIONS.getPreferredName()); builder.field(Fuzziness.FIELD.getPreferredName(), editDistance); - builder.field("transpositions", transpositions); - builder.field("min_length", fuzzyMinLength); - builder.field("prefix_length", fuzzyPrefixLength); - builder.field("unicode_aware", unicodeAware); - builder.field("max_determinized_states", maxDeterminizedStates); + builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions); + builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength); + builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength); + builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware); + builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates); builder.endObject(); return builder; } @@ -195,6 +207,9 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde * Options for regular expression queries */ public static class RegexOptionsBuilder implements ToXContent { + static final ParseField REGEX_OPTIONS = new ParseField("regex"); + static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value"); + static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states"); private int flagsValue = RegExp.ALL; private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES; @@ -228,9 +243,9 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject("regex"); - builder.field("flags_value", flagsValue); - builder.field("max_determinized_states", maxDeterminizedStates); + builder.startObject(REGEX_OPTIONS.getPreferredName()); + builder.field(FLAGS_VALUE.getPreferredName(), flagsValue); + builder.field(MAX_DETERMINIZED_STATES.getPreferredName(), maxDeterminizedStates); builder.endObject(); return builder; } @@ 
-322,7 +337,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde @Override protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { if (payloadFields != null) { - builder.startArray("payload"); + builder.startArray(PAYLOAD_FIELD.getPreferredName()); for (String field : payloadFields) { builder.value(field); } @@ -335,7 +350,7 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde regexOptionsBuilder.toXContent(builder, params); } if (queryContexts.isEmpty() == false) { - builder.startObject("contexts"); + builder.startObject(CONTEXTS_FIELD.getPreferredName()); for (Map.Entry> entry : this.queryContexts.entrySet()) { builder.startArray(entry.getKey()); for (ToXContent queryContext : entry.getValue()) { diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 6a6a6c38011..8ea170993e5 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -117,13 +117,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import static java.util.Collections.unmodifiableMap; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_CLIENT; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_BLOCKING_SERVER; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_CONNECT_TIMEOUT; -import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_RECEIVE_BUFFER_SIZE; -import static 
org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_DEFAULT_SEND_BUFFER_SIZE; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_KEEP_ALIVE; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_NO_DELAY; import static org.elasticsearch.common.network.NetworkService.TcpSettings.TCP_RECEIVE_BUFFER_SIZE; @@ -221,8 +217,8 @@ public class NettyTransport extends AbstractLifecycleComponent implem } this.workerCount = settings.getAsInt(WORKER_COUNT, EsExecutors.boundedNumberOfProcessors(settings) * 2); - this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", settings.getAsBoolean(TCP_BLOCKING_CLIENT, settings.getAsBoolean(TCP_BLOCKING, false))); - this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT))); + this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", TCP_BLOCKING_CLIENT.get(settings)); + this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", TCP_CONNECT_TIMEOUT.get(settings))); this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null); this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1); this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); @@ -362,29 +358,25 @@ public class NettyTransport extends AbstractLifecycleComponent implem clientBootstrap.setPipelineFactory(configureClientChannelPipelineFactory()); clientBootstrap.setOption("connectTimeoutMillis", connectTimeout.millis()); - String tcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true")); - if (!"default".equals(tcpNoDelay)) { - clientBootstrap.setOption("tcpNoDelay", 
Booleans.parseBoolean(tcpNoDelay, null)); - } + boolean tcpNoDelay = settings.getAsBoolean("transport.netty.tcp_no_delay", TCP_NO_DELAY.get(settings)); + clientBootstrap.setOption("tcpNoDelay", tcpNoDelay); - String tcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true")); - if (!"default".equals(tcpKeepAlive)) { - clientBootstrap.setOption("keepAlive", Booleans.parseBoolean(tcpKeepAlive, null)); - } + boolean tcpKeepAlive = settings.getAsBoolean("transport.netty.tcp_keep_alive", TCP_KEEP_ALIVE.get(settings)); + clientBootstrap.setOption("keepAlive", tcpKeepAlive); - ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE)); - if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) { + ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.get(settings)); + if (tcpSendBufferSize.bytes() > 0) { clientBootstrap.setOption("sendBufferSize", tcpSendBufferSize.bytes()); } - ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE)); - if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) { + ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.get(settings)); + if (tcpReceiveBufferSize.bytes() > 0) { clientBootstrap.setOption("receiveBufferSize", tcpReceiveBufferSize.bytes()); } clientBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory); - boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress())); + boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", 
TCP_REUSE_ADDRESS.get(settings)); clientBootstrap.setOption("reuseAddress", reuseAddress); return clientBootstrap; @@ -403,26 +395,22 @@ public class NettyTransport extends AbstractLifecycleComponent implem fallbackSettingsBuilder.put("publish_host", fallbackPublishHost); } - String fallbackTcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true")); - if (fallbackTcpNoDelay != null) { - fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay); - } + boolean fallbackTcpNoDelay = settings.getAsBoolean("transport.netty.tcp_no_delay", TCP_NO_DELAY.get(settings)); + fallbackSettingsBuilder.put("tcp_no_delay", fallbackTcpNoDelay); - String fallbackTcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true")); - if (fallbackTcpKeepAlive != null) { + boolean fallbackTcpKeepAlive = settings.getAsBoolean("transport.netty.tcp_keep_alive", TCP_KEEP_ALIVE.get(settings)); fallbackSettingsBuilder.put("tcp_keep_alive", fallbackTcpKeepAlive); - } - boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress())); + boolean fallbackReuseAddress = settings.getAsBoolean("transport.netty.reuse_address", TCP_REUSE_ADDRESS.get(settings)); fallbackSettingsBuilder.put("reuse_address", fallbackReuseAddress); - ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE)); - if (fallbackTcpSendBufferSize != null) { + ByteSizeValue fallbackTcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.get(settings)); + if (fallbackTcpSendBufferSize.bytes() >= 0) { fallbackSettingsBuilder.put("tcp_send_buffer_size", fallbackTcpSendBufferSize); } - ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", 
settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE)); - if (fallbackTcpBufferSize != null) { + ByteSizeValue fallbackTcpBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.get(settings)); + if (fallbackTcpBufferSize.bytes() >= 0) { fallbackSettingsBuilder.put("tcp_receive_buffer_size", fallbackTcpBufferSize); } @@ -552,15 +540,15 @@ public class NettyTransport extends AbstractLifecycleComponent implem } private void createServerBootstrap(String name, Settings settings) { - boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", this.settings.getAsBoolean(TCP_BLOCKING_SERVER, this.settings.getAsBoolean(TCP_BLOCKING, false))); + boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", TCP_BLOCKING_SERVER.get(settings)); String port = settings.get("port"); String bindHost = settings.get("bind_host"); String publishHost = settings.get("publish_host"); String tcpNoDelay = settings.get("tcp_no_delay"); String tcpKeepAlive = settings.get("tcp_keep_alive"); boolean reuseAddress = settings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress()); - ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", TCP_DEFAULT_SEND_BUFFER_SIZE); - ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_DEFAULT_RECEIVE_BUFFER_SIZE); + ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", TCP_SEND_BUFFER_SIZE.getDefault(settings)); + ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_RECEIVE_BUFFER_SIZE.getDefault(settings)); logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]", name, workerCount, port, bindHost, publishHost, compress, connectTimeout, connectionsPerNodeRecovery, 
connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, connectionsPerNodePing, receivePredictorMin, receivePredictorMax); diff --git a/core/src/main/java/org/elasticsearch/tribe/TribeService.java b/core/src/main/java/org/elasticsearch/tribe/TribeService.java index 78453c9eac6..48f04e3690e 100644 --- a/core/src/main/java/org/elasticsearch/tribe/TribeService.java +++ b/core/src/main/java/org/elasticsearch/tribe/TribeService.java @@ -44,7 +44,9 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; @@ -100,8 +102,8 @@ public class TribeService extends AbstractLifecycleComponent { // its a tribe configured node..., force settings Settings.Builder sb = Settings.builder().put(settings); sb.put("node.client", true); // this node should just act as a node client - sb.put("discovery.type", "local"); // a tribe node should not use zen discovery - sb.put("discovery.initial_state_timeout", 0); // nothing is going to be discovered, since no master will be elected + sb.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local"); // a tribe node should not use zen discovery + sb.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); // nothing is going to be discovered, since no master will be elected if (sb.get("cluster.name") == null) { sb.put("cluster.name", "tribe_" + Strings.randomBase64UUID()); // make sure it won't join other tribe nodes in the same JVM } @@ -132,7 +134,7 @@ public class TribeService extends AbstractLifecycleComponent { for (Map.Entry entry : nodesSettings.entrySet()) { Settings.Builder sb = 
Settings.builder().put(entry.getValue()); sb.put("name", settings.get("name") + "/" + entry.getKey()); - sb.put("path.home", settings.get("path.home")); // pass through ES home dir + sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(settings)); // pass through ES home dir sb.put(TRIBE_NAME, entry.getKey()); if (sb.get("http.enabled") == null) { sb.put("http.enabled", false); diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 8db3aca8dc8..2228d0365f7 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1721183.jar}" { +grant codeBase "${codebase.lucene-core-5.5.0-snapshot-1725675.jar}" { // needed to allow MMapDirectory's "unmap hack" permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 419c666d55e..408fdcd855e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1721183.jar}" { +grant codeBase "${codebase.lucene-test-framework-5.5.0-snapshot-1725675.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission 
"suppressAccessChecks"; }; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 55c10aa298e..d8d4f2656c8 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.tasks; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; @@ -27,6 +28,7 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.action.support.nodes.BaseNodesRequest; @@ -67,8 +69,10 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReferenceArray; +import static org.elasticsearch.action.support.PlainActionFuture.newFuture; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.not; @@ -418,7 +422,17 @@ public class 
TransportTasksActionTests extends ESTestCase { return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request")); } - private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws InterruptedException { + private ActionFuture startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request) throws InterruptedException { + PlainActionFuture future = newFuture(); + startBlockingTestNodesAction(checkLatch, request, future); + return future; + } + + private Task startBlockingTestNodesAction(CountDownLatch checkLatch, ActionListener listener) throws InterruptedException { + return startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request"), listener); + } + + private Task startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request, ActionListener listener) throws InterruptedException { CountDownLatch actionLatch = new CountDownLatch(nodesCount); TestNodesAction[] actions = new TestNodesAction[nodesCount]; for (int i = 0; i < testNodes.length; i++) { @@ -442,17 +456,31 @@ public class TransportTasksActionTests extends ESTestCase { for (TestNode node : testNodes) { assertEquals(0, node.transportService.getTaskManager().getTasks().size()); } - ActionFuture future = actions[0].execute(request); + Task task = actions[0].execute(request, listener); logger.info("Awaiting for all actions to start"); actionLatch.await(); logger.info("Done waiting for all actions to start"); - return future; + return task; } public void testRunningTasksCount() throws Exception { connectNodes(testNodes); CountDownLatch checkLatch = new CountDownLatch(1); - ActionFuture future = startBlockingTestNodesAction(checkLatch); + CountDownLatch responseLatch = new CountDownLatch(1); + final AtomicReference responseReference = new AtomicReference<>(); + Task mainTask = startBlockingTestNodesAction(checkLatch, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + 
responseReference.set(listTasksResponse); + responseLatch.countDown(); + } + + @Override + public void onFailure(Throwable e) { + logger.warn("Couldn't get list of tasks", e); + responseLatch.countDown(); + } + }); // Check task counts using taskManager Map localTasks = testNodes[0].transportService.getTaskManager().getTasks(); @@ -505,9 +533,17 @@ public class TransportTasksActionTests extends ESTestCase { assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription()); } + // Make sure that the main task on coordinating node is the task that was returned to us by execute() + listTasksRequest.actions("testAction"); // only pick the main task + response = testNode.transportListTasksAction.execute(listTasksRequest).get(); + assertEquals(1, response.getTasks().size()); + assertEquals(mainTask.getId(), response.getTasks().get(0).getId()); + // Release all tasks and wait for response checkLatch.countDown(); - NodesResponse responses = future.get(); + assertTrue(responseLatch.await(10, TimeUnit.SECONDS)); + + NodesResponse responses = responseReference.get(); assertEquals(0, responses.failureCount()); // Make sure that we don't have any lingering tasks diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index a64f67a8cd5..56eaad11f94 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -130,7 +130,7 @@ public class ClusterStatsIT extends ESIntegTestCase { public void testValuesSmokeScreen() throws IOException { internalCluster().ensureAtMostNumDataNodes(5); internalCluster().ensureAtLeastNumDataNodes(1); - assertAcked(prepareCreate("test1").setSettings(settingsBuilder().put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0).build())); + 
assertAcked(prepareCreate("test1").setSettings(settingsBuilder().put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0).build())); index("test1", "type", "1", "f", "f"); /* * Ensure at least one shard is allocated otherwise the FS stats might diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index cb0e0fa0f78..4d53d6cd1e5 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -47,7 +47,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 1f0bcf0c17c..292705305cc 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -52,19 +52,13 @@ import static org.hamcrest.core.IsNull.notNullValue; @ClusterScope(scope = Scope.TEST) public class CreateIndexIT extends ESIntegTestCase { - public void testCreationDateGiven() { - prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 4l)).get(); - ClusterStateResponse response = client().admin().cluster().prepareState().get(); - ClusterState state = response.getState(); - 
assertThat(state, notNullValue()); - MetaData metadata = state.getMetaData(); - assertThat(metadata, notNullValue()); - ImmutableOpenMap indices = metadata.getIndices(); - assertThat(indices, notNullValue()); - assertThat(indices.size(), equalTo(1)); - IndexMetaData index = indices.get("test"); - assertThat(index, notNullValue()); - assertThat(index.getCreationDate(), equalTo(4l)); + public void testCreationDateGivenFails() { + try { + prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_CREATION_DATE, 4l)).get(); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("unknown setting [index.creation_date]", ex.getMessage()); + } } public void testCreationDateGenerated() { @@ -112,41 +106,27 @@ public class CreateIndexIT extends ESIntegTestCase { } public void testInvalidShardCountSettings() throws Exception { + int value = randomIntBetween(-10, 0); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(-10, 0)) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, value) .build()) .get(); fail("should have thrown an exception about the primary shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_shards] must be >= 1", e.getMessage()); } - + value = randomIntBetween(-10, -1); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, value) .build()) .get(); fail("should have thrown an exception about the replica shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica 
shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage()); } - try { - prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(-10, 0)) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1)) - .build()) - .get(); - fail("should have thrown an exception about the shard count"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); - } } public void testCreateIndexWithBlocks() { @@ -164,39 +144,38 @@ public class CreateIndexIT extends ESIntegTestCase { disableIndexBlock("test", IndexMetaData.SETTING_BLOCKS_METADATA); } + public void testUnknownSettingFails() { + try { + prepareCreate("test").setSettings(Settings.builder() + .put("index.unknown.value", "this must fail") + .build()) + .get(); + fail("should have thrown an exception about the shard count"); + } catch (IllegalArgumentException e) { + assertEquals("unknown setting [index.unknown.value]", e.getMessage()); + } + } + public void testInvalidShardCountSettingsWithoutPrefix() throws Exception { + int value = randomIntBetween(-10, 0); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, 0)) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), value) .build()) .get(); fail("should have thrown an exception about the shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - 
e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_shards] must be >= 1", e.getMessage()); } + value = randomIntBetween(-10, -1); try { prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, -1)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), value) .build()) .get(); fail("should have thrown an exception about the shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); - } - try { - prepareCreate("test").setSettings(Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, 0)) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS.substring(IndexMetaData.INDEX_SETTING_PREFIX.length()), randomIntBetween(-10, -1)) - .build()) - .get(); - fail("should have thrown an exception about the shard count"); - } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 1 or more primary shards"), equalTo(true)); - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("index must have 0 or more replica shards"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage()); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java index a2b4d97235d..36aad4fb36e 100644 --- 
a/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java @@ -24,15 +24,23 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.indices.IndexClosedException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; +import java.util.Collection; import java.util.List; import static org.hamcrest.Matchers.is; public class IndicesSegmentsRequestTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + @Before public void setupIndex() { Settings settings = Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 3a81f0ba0d9..d7f85ec5650 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -35,10 +35,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.MockIndexEventListener; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSIndexStore; +import org.elasticsearch.test.transport.MockTransportService; 
+import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -57,6 +62,12 @@ import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class IndicesShardStoreRequestIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList( MockFSIndexStore.TestPlugin.class); + } + public void testEmpty() { ensureGreen(); IndicesShardStoresResponse rsp = client().admin().indices().prepareShardStores().get(); @@ -148,7 +159,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { internalCluster().ensureAtLeastNumDataNodes(2); assertAcked(prepareCreate(index).setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "5") - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) )); indexRandomData(index); ensureGreen(index); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 09079be6ee9..54bf0115d87 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -93,7 +93,7 @@ public class MetaDataIndexTemplateServiceTests extends ESTestCase { null, new HashSet<>(), null, - null); + null, null); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY)); final List throwables = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index 04b58e6b9fc..70d78e78f23 
100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -156,7 +157,7 @@ public class BulkProcessorIT extends ESIntegTestCase { public void testBulkProcessorConcurrentRequestsNoNodeAvailableException() throws Exception { //we create a transport client with no nodes to make sure it throws NoNodeAvailableException Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); Client transportClient = TransportClient.builder().settings(settings).build(); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 81eb832be9a..66fb8aa3f21 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -110,7 +110,7 @@ public class BulkRequestTests extends ESTestCase { public void testBulkAddIterable() { BulkRequest bulkRequest = Requests.bulkRequest(); - List requests = new ArrayList<>(); + List> requests = new ArrayList<>(); requests.add(new IndexRequest("test", "test", "id").source("field", "value")); requests.add(new UpdateRequest("test", "test", "id").doc("field", "value")); requests.add(new DeleteRequest("test", "test", "id")); diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index 
ee0ceef1721..7301d37488b 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -19,10 +19,11 @@ package org.elasticsearch.action.search; +import java.io.IOException; + import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -34,15 +35,14 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; -import java.io.IOException; -import java.util.Collections; +import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class MultiSearchRequestTests extends ESTestCase { public void testSimpleAdd() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch1.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -69,7 +69,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd2() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, 
Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch2.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -88,7 +88,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd3() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch3.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -108,7 +108,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd4() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch4.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null, null, null, 
IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); @@ -130,7 +130,7 @@ public class MultiSearchRequestTests extends ESTestCase { } public void testSimpleAdd5() throws Exception { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, Collections.singleton(new MatchAllQueryParser()), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser())); byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch5.json"); MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), true, null, null, null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java index fc6453318cf..9cef4d46e8b 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; @@ -38,7 +39,7 @@ public class SearchRequestBuilderTests extends ESTestCase { //this client will not be hit by any request, but it needs to be a non null proper client //that is why we create it but we don't add any transport address to it Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + 
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); client = TransportClient.builder().settings(settings).build(); } diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 6a14989be1a..00068c05efe 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -220,9 +220,10 @@ public class TransportActionFilterChainTests extends ESTestCase { RequestTestFilter testFilter = new RequestTestFilter(randomInt(), new RequestCallback() { @Override - public void execute(Task task, final String action, final ActionRequest actionRequest, final ActionListener actionListener, final ActionFilterChain actionFilterChain) { + public , Response extends ActionResponse> void execute(Task task, String action, Request request, + ActionListener listener, ActionFilterChain actionFilterChain) { for (int i = 0; i <= additionalContinueCount; i++) { - actionFilterChain.proceed(task, action, actionRequest, actionListener); + actionFilterChain.proceed(task, action, request, listener); } } }); @@ -276,7 +277,8 @@ public class TransportActionFilterChainTests extends ESTestCase { ResponseTestFilter testFilter = new ResponseTestFilter(randomInt(), new ResponseCallback() { @Override - public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void execute(String action, Response response, ActionListener listener, + ActionFilterChain chain) { for (int i = 0; i <= additionalContinueCount; i++) { chain.proceed(action, response, listener); } @@ -344,17 +346,18 @@ public class TransportActionFilterChainTests extends ESTestCase { return order; } - @SuppressWarnings("unchecked") @Override - public void apply(Task task, String action, 
ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { + public , Response extends ActionResponse> void apply(Task task, String action, Request request, + ActionListener listener, ActionFilterChain chain) { this.runs.incrementAndGet(); this.lastActionName = action; this.executionToken = counter.incrementAndGet(); - this.callback.execute(task, action, actionRequest, actionListener, actionFilterChain); + this.callback.execute(task, action, request, listener, chain); } @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void apply(String action, Response response, ActionListener listener, + ActionFilterChain chain) { chain.proceed(action, response, listener); } } @@ -377,12 +380,14 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public void apply(Task task, String action, ActionRequest request, ActionListener listener, ActionFilterChain chain) { + public , Response extends ActionResponse> void apply(Task task, String action, Request request, + ActionListener listener, ActionFilterChain chain) { chain.proceed(task, action, request, listener); } @Override - public void apply(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void apply(String action, Response response, ActionListener listener, + ActionFilterChain chain) { this.runs.incrementAndGet(); this.lastActionName = action; this.executionToken = counter.incrementAndGet(); @@ -393,21 +398,24 @@ public class TransportActionFilterChainTests extends ESTestCase { private static enum RequestOperation implements RequestCallback { CONTINUE_PROCESSING { @Override - public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { - actionFilterChain.proceed(task, action, actionRequest, actionListener); + public , Response extends 
ActionResponse> void execute(Task task, String action, Request request, + ActionListener listener, ActionFilterChain actionFilterChain) { + actionFilterChain.proceed(task, action, request, listener); } }, LISTENER_RESPONSE { @Override - @SuppressWarnings("unchecked") - public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { - actionListener.onResponse(new TestResponse()); + @SuppressWarnings("unchecked") // Safe because its all we test with + public , Response extends ActionResponse> void execute(Task task, String action, Request request, + ActionListener listener, ActionFilterChain actionFilterChain) { + ((ActionListener) listener).onResponse(new TestResponse()); } }, LISTENER_FAILURE { @Override - public void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain) { - actionListener.onFailure(new ElasticsearchTimeoutException("")); + public , Response extends ActionResponse> void execute(Task task, String action, Request request, + ActionListener listener, ActionFilterChain actionFilterChain) { + listener.onFailure(new ElasticsearchTimeoutException("")); } } } @@ -415,34 +423,39 @@ public class TransportActionFilterChainTests extends ESTestCase { private static enum ResponseOperation implements ResponseCallback { CONTINUE_PROCESSING { @Override - public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void execute(String action, Response response, ActionListener listener, + ActionFilterChain chain) { chain.proceed(action, response, listener); } }, LISTENER_RESPONSE { @Override - @SuppressWarnings("unchecked") - public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { - listener.onResponse(new TestResponse()); + @SuppressWarnings("unchecked") // Safe because its all we test with + 
public void execute(String action, Response response, ActionListener listener, + ActionFilterChain chain) { + ((ActionListener) listener).onResponse(new TestResponse()); } }, LISTENER_FAILURE { @Override - public void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain) { + public void execute(String action, Response response, ActionListener listener, + ActionFilterChain chain) { listener.onFailure(new ElasticsearchTimeoutException("")); } } } private static interface RequestCallback { - void execute(Task task, String action, ActionRequest actionRequest, ActionListener actionListener, ActionFilterChain actionFilterChain); + , Response extends ActionResponse> void execute(Task task, String action, Request request, + ActionListener listener, ActionFilterChain actionFilterChain); } private static interface ResponseCallback { - void execute(String action, ActionResponse response, ActionListener listener, ActionFilterChain chain); + void execute(String action, Response response, ActionListener listener, + ActionFilterChain chain); } - public static class TestRequest extends ActionRequest { + public static class TestRequest extends ActionRequest { @Override public ActionRequestValidationException validate() { return null; diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 90d91dfb197..d94049c036f 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -410,7 +410,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { // simulate node failure totalShards += map.get(entry.getKey()).size(); totalFailedShards += map.get(entry.getKey()).size(); - 
transport.handleResponse(requestId, new Exception()); + transport.handleRemoteError(requestId, new Exception()); } else { List shards = map.get(entry.getKey()); List shardResults = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index f20e54050c6..6d0ae3d2e76 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -60,8 +60,8 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase { logger.info("--> start 4 nodes, 3 master, 1 data"); final Settings sharedSettings = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly - .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 980558c2716..c9a7d9bd2d2 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -297,14 +297,14 @@ 
public class TransportMasterNodeActionTests extends ESTestCase { assertThat(capturedRequest.action, equalTo("testAction")); if (failsWithConnectTransportException) { - transport.handleResponse(capturedRequest.requestId, new ConnectTransportException(remoteNode, "Fake error")); + transport.handleRemoteError(capturedRequest.requestId, new ConnectTransportException(remoteNode, "Fake error")); assertFalse(listener.isDone()); clusterService.setState(ClusterStateCreationUtils.state(localNode, localNode, allNodes)); assertTrue(listener.isDone()); listener.get(); } else { Throwable t = new Throwable(); - transport.handleResponse(capturedRequest.requestId, t); + transport.handleRemoteError(capturedRequest.requestId, t); assertTrue(listener.isDone()); try { listener.get(); diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index d1abe8653f0..c3084b93eb8 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.ReplicationResponse; +import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushResponse; @@ -100,7 +101,7 @@ public class BroadcastReplicationTests extends ESTestCase { clusterService.setState(state(index, randomBoolean(), randomBoolean() ? 
ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); + Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { if (randomBoolean()) { shardRequests.v2().onFailure(new NoShardAvailableActionException(shardRequests.v1())); @@ -119,7 +120,7 @@ public class BroadcastReplicationTests extends ESTestCase { clusterService.setState(state(index, randomBoolean(), ShardRoutingState.STARTED)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); + Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { ReplicationResponse replicationResponse = new ReplicationResponse(); replicationResponse.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, new ReplicationResponse.ShardInfo.Failure[0])); @@ -134,7 +135,7 @@ public class BroadcastReplicationTests extends ESTestCase { int numShards = randomInt(3); clusterService.setState(stateWithAssignedPrimariesAndOneReplica(index, numShards)); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - Future response = (broadcastReplicationAction.execute(new BroadcastRequest().indices(index))); + Future response = (broadcastReplicationAction.execute(new DummyBroadcastRequest().indices(index))); int succeeded = 0; int failed = 0; for (Tuple> shardRequests : broadcastReplicationAction.capturedShardRequests) { @@ -164,7 +165,7 @@ public class BroadcastReplicationTests extends ESTestCase { public void testNoShards() throws 
InterruptedException, ExecutionException, IOException { clusterService.setState(stateWithNoShard()); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - BroadcastResponse response = executeAndAssertImmediateResponse(broadcastReplicationAction, new BroadcastRequest()); + BroadcastResponse response = executeAndAssertImmediateResponse(broadcastReplicationAction, new DummyBroadcastRequest()); assertBroadcastResponse(0, 0, 0, response, null); } @@ -174,16 +175,19 @@ public class BroadcastReplicationTests extends ESTestCase { ClusterState clusterState = state(index, randomBoolean(), randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED, ShardRoutingState.UNASSIGNED); logger.debug("--> using initial state:\n{}", clusterService.state().prettyPrint()); - List shards = broadcastReplicationAction.shards(new BroadcastRequest().indices(shardId.index().name()), clusterState); + List shards = broadcastReplicationAction.shards(new DummyBroadcastRequest().indices(shardId.index().name()), clusterState); assertThat(shards.size(), equalTo(1)); assertThat(shards.get(0), equalTo(shardId)); } - private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { + private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); - public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super("test-broadcast-replication-action", BroadcastRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); + public TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, 
ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { + super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService, + actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } @Override @@ -192,17 +196,18 @@ public class BroadcastReplicationTests extends ESTestCase { } @Override - protected ReplicationRequest newShardRequest(BroadcastRequest request, ShardId shardId) { - return new ReplicationRequest().setShardId(shardId); + protected BasicReplicationRequest newShardRequest(DummyBroadcastRequest request, ShardId shardId) { + return new BasicReplicationRequest().setShardId(shardId); } @Override - protected BroadcastResponse newResponse(int successfulShards, int failedShards, int totalNumCopies, List shardFailures) { + protected BroadcastResponse newResponse(int successfulShards, int failedShards, int totalNumCopies, + List shardFailures) { return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } @Override - protected void shardExecute(BroadcastRequest request, ShardId shardId, ActionListener shardActionListener) { + protected void shardExecute(DummyBroadcastRequest request, ShardId shardId, ActionListener shardActionListener) { capturedShardRequests.add(new Tuple<>(shardId, shardActionListener)); } } @@ -216,7 +221,7 @@ public class BroadcastReplicationTests extends ESTestCase { return flushResponse; } - public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, BroadcastRequest request) throws InterruptedException, ExecutionException { + public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) throws InterruptedException, ExecutionException { 
return (BroadcastResponse) broadcastAction.execute(request).actionGet("5s"); } @@ -228,4 +233,8 @@ public class BroadcastReplicationTests extends ESTestCase { assertThat(response.getShardFailures()[0].getCause().getCause(), instanceOf(exceptionClass)); } } + + public static class DummyBroadcastRequest extends BroadcastRequest { + + } } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 0a390ea3706..9fdbdf1cb38 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -488,7 +489,7 @@ public class TransportReplicationActionTests extends ESTestCase { TransportReplicationAction.ReplicationPhase replicationPhase = action.new ReplicationPhase(request, new Response(), - request.shardId(), createTransportChannel(listener), reference, null); + request.shardId(), createTransportChannel(listener), reference); assertThat(replicationPhase.totalShards(), equalTo(totalShards)); assertThat(replicationPhase.pending(), equalTo(assignedReplicas)); @@ -545,7 +546,7 @@ public class TransportReplicationActionTests extends ESTestCase { t = new IndexShardNotStartedException(shardId, IndexShardState.RECOVERING); } logger.debug("--> simulating failure on {} with [{}]", capturedRequest.node, 
t.getClass().getSimpleName()); - transport.handleResponse(capturedRequest.requestId, t); + transport.handleRemoteError(capturedRequest.requestId, t); if (criticalFailure) { CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear(); assertEquals(1, shardFailedRequests.length); @@ -557,7 +558,23 @@ public class TransportReplicationActionTests extends ESTestCase { // the shard the request was sent to and the shard to be failed should be the same assertEquals(shardRoutingEntry.getShardRouting(), routing); failures.add(shardFailedRequest); - transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE); + if (randomBoolean()) { + // simulate master left and test that the shard failure is retried + int numberOfRetries = randomIntBetween(1, 4); + CapturingTransport.CapturedRequest currentRequest = shardFailedRequest; + for (int retryNumber = 0; retryNumber < numberOfRetries; retryNumber++) { + // force a new cluster state to simulate a new master having been elected + clusterService.setState(ClusterState.builder(clusterService.state())); + transport.handleRemoteError(currentRequest.requestId, new NotMasterException("shard-failed-test")); + CapturingTransport.CapturedRequest[] retryRequests = transport.getCapturedRequestsAndClear(); + assertEquals(1, retryRequests.length); + currentRequest = retryRequests[0]; + } + // now simulate that the last retry succeeded + transport.handleResponse(currentRequest.requestId, TransportResponse.Empty.INSTANCE); + } else { + transport.handleResponse(shardFailedRequest.requestId, TransportResponse.Empty.INSTANCE); + } } } else { successful++; @@ -583,7 +600,7 @@ public class TransportReplicationActionTests extends ESTestCase { assertIndexShardCounter(1); } - public void testCounterOnPrimary() throws InterruptedException, ExecutionException, IOException { + public void testCounterOnPrimary() throws Exception { final String index = "test"; final ShardId shardId = new 
ShardId(index, 0); // no replica, we only want to test on primary @@ -611,9 +628,7 @@ public class TransportReplicationActionTests extends ESTestCase { t.start(); // shard operation should be ongoing, so the counter is at 2 // we have to wait here because increment happens in thread - awaitBusy(() -> count.get() == 2); - - assertIndexShardCounter(2); + assertBusy(() -> assertIndexShardCounter(2)); assertThat(transport.capturedRequests().length, equalTo(0)); ((ActionWithDelay) action).countDownLatch.countDown(); t.join(); @@ -647,7 +662,7 @@ public class TransportReplicationActionTests extends ESTestCase { CapturingTransport.CapturedRequest[] replicationRequests = transport.getCapturedRequestsAndClear(); assertThat(replicationRequests.length, equalTo(1)); // try with failure response - transport.handleResponse(replicationRequests[0].requestId, new CorruptIndexException("simulated", (String) null)); + transport.handleRemoteError(replicationRequests[0].requestId, new CorruptIndexException("simulated", (String) null)); CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear(); assertEquals(1, shardFailedRequests.length); transport.handleResponse(shardFailedRequests[0].requestId, TransportResponse.Empty.INSTANCE); @@ -664,7 +679,7 @@ public class TransportReplicationActionTests extends ESTestCase { @Override public void run() { try { - replicaOperationTransportHandler.messageReceived(new Request(), createTransportChannel(new PlainActionFuture<>())); + replicaOperationTransportHandler.messageReceived(new Request().setShardId(shardId), createTransportChannel(new PlainActionFuture<>())); } catch (Exception e) { } } @@ -672,7 +687,7 @@ public class TransportReplicationActionTests extends ESTestCase { t.start(); // shard operation should be ongoing, so the counter is at 2 // we have to wait here because increment happens in thread - awaitBusy(() -> count.get() == 2); + assertBusy(() -> assertIndexShardCounter(2)); ((ActionWithDelay) 
action).countDownLatch.countDown(); t.join(); // operation should have finished and counter decreased because no outstanding replica requests diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index fce431238dd..344846c363e 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -198,7 +198,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { long requestId = transport.capturedRequests()[0].requestId; transport.clear(); // this should not trigger retry or anything and the listener should report exception immediately - transport.handleResponse(requestId, new TransportException("a generic transport exception", new Exception("generic test exception"))); + transport.handleRemoteError(requestId, new TransportException("a generic transport exception", new Exception("generic test exception"))); try { // result should return immediately @@ -240,7 +240,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { long requestId = transport.capturedRequests()[0].requestId; transport.clear(); DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); - transport.handleResponse(requestId, new ConnectTransportException(node, "test exception")); + transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // trigger cluster state observer clusterService.setState(ClusterStateCreationUtils.state("test", local, ShardRoutingState.STARTED)); assertThat(transport.capturedRequests().length, equalTo(1)); @@ -258,7 +258,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { 
long requestId = transport.capturedRequests()[0].requestId; transport.clear(); DiscoveryNode node = clusterService.state().getNodes().getLocalNode(); - transport.handleResponse(requestId, new ConnectTransportException(node, "test exception")); + transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); // wait until the timeout was triggered and we actually tried to send for the second time assertBusy(new Runnable() { @@ -270,7 +270,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { // let it fail the second time too requestId = transport.capturedRequests()[0].requestId; - transport.handleResponse(requestId, new ConnectTransportException(node, "test exception")); + transport.handleLocalError(requestId, new ConnectTransportException(node, "test exception")); try { // result should return immediately assertTrue(listener.isDone()); @@ -313,4 +313,4 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { } } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index d1217ea6f31..35e33b378bf 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -411,7 +411,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { protected TermVectorsRequestBuilder getRequestForConfig(TestConfig config) { return client().prepareTermVectors(randomBoolean() ? 
config.doc.index : config.doc.alias, config.doc.type, config.doc.id).setPayloads(config.requestPayloads) .setOffsets(config.requestOffsets).setPositions(config.requestPositions).setFieldStatistics(true).setTermStatistics(true) - .setSelectedFields(config.selectedFields); + .setSelectedFields(config.selectedFields).setRealtime(false); } protected Fields getTermVectorsFromLucene(DirectoryReader directoryReader, TestDoc doc) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index 1cbe05da6a0..867d054a712 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -443,7 +443,7 @@ public class IndexAliasesIT extends ESIntegTestCase { public void testWaitForAliasCreationSingleShard() throws Exception { logger.info("--> creating index [test]"); - assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.numberOfReplicas", 0).put("index.numberOfShards", 1))).get()); + assertAcked(admin().indices().create(createIndexRequest("test").settings(settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 1))).get()); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index 1518f2ebc3a..74881413799 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -39,11 +39,11 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.MultiDataPathUpgrader; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexSettings; @@ -51,12 +51,14 @@ import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.string.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.MergePolicyConfig; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; @@ -73,6 +75,7 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -91,6 +94,12 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // TODO: test for proper exception on unsupported indexes (maybe via separate test?) // We have a 0.20.6.zip etc for this. 
+ + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); + } + List indexes; List unsupportedIndexes; static Path singleDataPath; @@ -134,13 +143,13 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { Path baseTempDir = createTempDir(); // start single data path node Settings.Builder nodeSettings = Settings.builder() - .put("path.data", baseTempDir.resolve("single-path").toAbsolutePath()) + .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("single-path").toAbsolutePath()) .put("node.master", false); // workaround for dangling index loading issue when node is master InternalTestCluster.Async singleDataPathNode = internalCluster().startNodeAsync(nodeSettings.build()); // start multi data path node nodeSettings = Settings.builder() - .put("path.data", baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath()) + .put(Environment.PATH_DATA_SETTING.getKey(), baseTempDir.resolve("multi-path1").toAbsolutePath() + "," + baseTempDir.resolve("multi-path2").toAbsolutePath()) .put("node.master", false); // workaround for dangling index loading issue when node is master InternalTestCluster.Async multiDataPathNode = internalCluster().startNodeAsync(nodeSettings.build()); @@ -199,10 +208,6 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } void importIndex(String indexName) throws IOException { - final Iterable instances = internalCluster().getInstances(NodeEnvironment.class); - for (NodeEnvironment nodeEnv : instances) { // upgrade multidata path - MultiDataPathUpgrader.upgradeMultiDataPath(nodeEnv, logger); - } // force reloading dangling indices with a cluster state republish client().admin().cluster().prepareReroute().get(); ensureGreen(indexName); @@ -210,6 +215,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // randomly distribute the files from src over dests paths public static 
void copyIndex(final ESLogger logger, final Path src, final String indexName, final Path... dests) throws IOException { + Path destinationDataPath = dests[randomInt(dests.length - 1)]; for (Path dest : dests) { Path indexDir = dest.resolve(indexName); assertFalse(Files.exists(indexDir)); @@ -235,7 +241,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } Path relativeFile = src.relativize(file); - Path destFile = dests[randomInt(dests.length - 1)].resolve(indexName).resolve(relativeFile); + Path destFile = destinationDataPath.resolve(indexName).resolve(relativeFile); logger.trace("--> Moving " + relativeFile.toString() + " to " + destFile.toString()); Files.move(file, destFile); assertFalse(Files.exists(file)); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index a573a8374ef..23163b86112 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.bwcompat; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; @@ -28,7 +29,7 @@ public class RecoveryWithUnsupportedIndicesIT extends StaticIndexBackwardCompati String indexName = "unsupported-0.20.6"; logger.info("Checking static index " + indexName); - Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), Node.HTTP_ENABLED, true); + Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), NetworkModule.HTTP_ENABLED.getKey(), true); try { internalCluster().startNode(nodeSettings); fail(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java 
b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 6ad05b3ff84..eabb954c2c6 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -1,3 +1,4 @@ +/* /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -27,6 +28,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.repositories.uri.URLRepository; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.RestoreInfo; @@ -64,7 +67,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { // Configure using path.repo return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.repo", getBwcIndicesPath()) + .put(Environment.PATH_REPO_SETTING.getKey(), getBwcIndicesPath()) .build(); } else { // Configure using url white list @@ -72,7 +75,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { URI repoJarPatternUri = new URI("jar:" + getBwcIndicesPath().toUri().toString() + "*.zip!/repo/"); return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .putArray("repositories.url.allowed_urls", repoJarPatternUri.toString()) + .putArray(URLRepository.ALLOWED_URLS_SETTING.getKey(), repoJarPatternUri.toString()) .build(); } catch (URISyntaxException ex) { throw new IllegalArgumentException(ex); diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 
b814cff520c..3bdfc1fb7ee 100644 --- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -48,6 +48,7 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.support.Headers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportMessage; @@ -90,7 +91,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { public void initClient() { Settings settings = Settings.builder() .put(HEADER_SETTINGS) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); threadPool = new ThreadPool("test-" + getTestName()); client = buildClient(settings, ACTIONS); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index f127ae28378..e61dab2fc4b 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; @@ -83,7 +84,7 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { .put("node.name", "transport_client_" + 
this.getTestName() + "_1") .put("client.transport.nodes_sampler_interval", "1s") .put(HEADER_SETTINGS) - .put("path.home", createTempDir().toString()).build()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build()) .addPlugin(InternalTransportService.TestPlugin.class) .build(); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index f01fdffd147..8ab432d6e41 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; @@ -52,7 +53,7 @@ public class TransportClientIT extends ESIntegTestCase { TransportClientNodesService nodeService = client.nodeService(); Node node = new Node(Settings.builder() .put(internalCluster().getDefaultSettings()) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("node.name", "testNodeVersionIsUpdated") .put("http.enabled", false) .put("node.data", false) @@ -89,7 +90,10 @@ public class TransportClientIT extends ESIntegTestCase { } public void testThatTransportClientSettingCannotBeChanged() { - Settings baseSettings = settingsBuilder().put(Client.CLIENT_TYPE_SETTING, "anything").put("path.home", createTempDir()).build(); + Settings baseSettings = settingsBuilder() + .put(Client.CLIENT_TYPE_SETTING, "anything") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .build(); try (TransportClient client = 
TransportClient.builder().settings(baseSettings).build()) { Settings settings = client.injector.getInstance(Settings.class); assertThat(settings.get(Client.CLIENT_TYPE_SETTING), is("transport")); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index b28fdba8c77..e5367d1da42 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -57,7 +58,7 @@ public class TransportClientRetryIT extends ESIntegTestCase { .put("node.mode", internalCluster().getNodeMode()) .put(ClusterName.SETTING, internalCluster().getClusterName()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) - .put("path.home", createTempDir()); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); try (TransportClient transportClient = TransportClient.builder().settings(builder.build()).build()) { transportClient.addTransportAddresses(addresses); diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 31487614c99..9a8e8fb7268 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectCursor; + 
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; @@ -100,7 +101,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { } @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { + protected boolean apply(String action, ActionRequest request, ActionListener listener) { if (blockedActions.contains(action)) { throw new ElasticsearchException("force exception on [" + action + "]"); } @@ -108,7 +109,7 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { } @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { + protected boolean apply(String action, ActionResponse response, ActionListener listener) { return true; } @@ -139,8 +140,8 @@ public class ClusterInfoServiceIT extends ESIntegTestCase { public void testClusterInfoServiceCollectsInformation() throws Exception { internalCluster().startNodesAsync(2).get(); assertAcked(prepareCreate("test").setSettings(settingsBuilder() - .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE).build())); + .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE).build())); ensureGreen("test"); InternalTestCluster internalTestCluster = internalCluster(); // Get the cluster info service on the master node diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index 0e9f6cd9e04..7cfa0eeaaa7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -31,15 +31,13 @@ import 
org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllo import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.cluster.settings.DynamicSettings; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.index.settings.IndexDynamicSettings; public class ClusterModuleTests extends ModuleTestCase { @@ -93,18 +91,19 @@ public class ClusterModuleTests extends ModuleTestCase { } public void testRegisterIndexDynamicSettingDuplicate() { - ClusterModule module = new ClusterModule(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, new SettingsFilter(Settings.EMPTY)); try { - module.registerIndexDynamicSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, Validator.EMPTY); + module.registerSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING); } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE + "] twice"); + assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice"); } } public void testRegisterIndexDynamicSetting() { - ClusterModule module = new ClusterModule(Settings.EMPTY); - module.registerIndexDynamicSetting("foo.bar", Validator.EMPTY); - assertInstanceBindingWithAnnotation(module, 
DynamicSettings.class, dynamicSettings -> dynamicSettings.hasDynamicSetting("foo.bar"), IndexDynamicSettings.class); + final SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY); + SettingsModule module = new SettingsModule(Settings.EMPTY, settingsFilter); + module.registerSetting(Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX)); + assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("foo.bar")); } public void testRegisterAllocationDeciderDuplicate() { diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java index 2d781c866de..72d58f7f70e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterServiceIT.java @@ -642,7 +642,7 @@ public class ClusterServiceIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("discovery.type", "zen") .put("discovery.zen.minimum_master_nodes", 1) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "400ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms") .put("discovery.initial_state_timeout", "500ms") .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index 98eea13e673..f581e4c91f6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -164,7 +164,50 @@ public class DiskUsageTests extends ESTestCase { assertDiskUsage(mostNode_3, node3FSInfo[1]); } + public void testFillDiskUsageSomeInvalidValues() { + ImmutableOpenMap.Builder newLeastAvailableUsages = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder newMostAvailableUsages = ImmutableOpenMap.builder(); + FsInfo.Path[] node1FSInfo = new FsInfo.Path[] { + new FsInfo.Path("/middle", "/dev/sda", 100, 90, 80), + new 
FsInfo.Path("/least", "/dev/sdb", -1, -1, -1), + new FsInfo.Path("/most", "/dev/sdc", 300, 290, 280), + }; + FsInfo.Path[] node2FSInfo = new FsInfo.Path[] { + new FsInfo.Path("/least_most", "/dev/sda", -2, -1, -1), + }; + + FsInfo.Path[] node3FSInfo = new FsInfo.Path[] { + new FsInfo.Path("/most", "/dev/sda", 100, 90, 70), + new FsInfo.Path("/least", "/dev/sda", 10, -8, 0), + }; + NodeStats[] nodeStats = new NodeStats[] { + new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, + null,null,null,null,null,new FsInfo(0, node1FSInfo), null,null,null,null,null), + new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, + null,null,null,null,null, new FsInfo(0, node2FSInfo), null,null,null,null,null), + new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, Version.CURRENT), 0, + null,null,null,null,null, new FsInfo(0, node3FSInfo), null,null,null,null,null) + }; + InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages); + DiskUsage leastNode_1 = newLeastAvailableUsages.get("node_1"); + DiskUsage mostNode_1 = newMostAvailableUsages.get("node_1"); + assertNull("node1 should have been skipped", leastNode_1); + assertDiskUsage(mostNode_1, node1FSInfo[2]); + + DiskUsage leastNode_2 = newLeastAvailableUsages.get("node_2"); + DiskUsage mostNode_2 = newMostAvailableUsages.get("node_2"); + assertNull("node2 should have been skipped", leastNode_2); + assertNull("node2 should have been skipped", mostNode_2); + + DiskUsage leastNode_3 = newLeastAvailableUsages.get("node_3"); + DiskUsage mostNode_3 = newMostAvailableUsages.get("node_3"); + assertDiskUsage(leastNode_3, node3FSInfo[1]); + assertDiskUsage(mostNode_3, node3FSInfo[0]); + } + private void assertDiskUsage(DiskUsage usage, FsInfo.Path path) { + assertNotNull(usage); + assertNotNull(path); assertEquals(usage.toString(), usage.getPath(), path.getPath()); 
assertEquals(usage.toString(), usage.getTotalBytes(), path.getTotal().bytes()); assertEquals(usage.toString(), usage.getFreeBytes(), path.getAvailable().bytes()); diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 2d726d97424..612f910d3fa 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -77,7 +77,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("discovery.type", "zen") .put("discovery.zen.minimum_master_nodes", 2) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .build(); @@ -189,7 +189,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("discovery.type", "zen") .put("discovery.zen.minimum_master_nodes", 3) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "1s") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "1s") .put("discovery.initial_state_timeout", "500ms") .build(); @@ -264,7 +264,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { public void testDynamicUpdateMinimumMasterNodes() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "zen") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "400ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "400ms") .put("discovery.initial_state_timeout", "500ms") .build(); @@ -322,7 +322,7 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { int nodeCount = scaledRandomIntBetween(1, 5); Settings.Builder settings = settingsBuilder() .put("discovery.type", "zen") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms"); // set an initial 
value which is at least quorum to avoid split brains during initial startup @@ -361,8 +361,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { public void testCanNotPublishWithoutMinMastNodes() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "zen") - .put(FaultDetection.SETTING_PING_TIMEOUT, "1h") // disable it - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1h") // disable it + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "100ms") // speed things up .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java index 8e5479d6f84..3c71f5f5e2b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/NoMasterNodeIT.java @@ -65,7 +65,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.type", "zen") .put("action.auto_create_index", autoCreateIndex) .put("discovery.zen.minimum_master_nodes", 2) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "all") .build(); @@ -217,7 +217,7 @@ public class NoMasterNodeIT extends ESIntegTestCase { .put("discovery.type", "zen") .put("action.auto_create_index", false) .put("discovery.zen.minimum_master_nodes", 2) - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .put(DiscoverySettings.NO_MASTER_BLOCK_SETTING.getKey(), "write") .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java 
b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index f69c9149a9c..c59405f2345 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -20,41 +20,81 @@ package org.elasticsearch.cluster.action.shard; import org.apache.lucene.index.CorruptIndexException; +import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.cluster.TestClusterService; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.ReceiveTimeoutTransportException; +import org.elasticsearch.transport.NodeDisconnectedException; +import org.elasticsearch.transport.NodeNotConnectedException; +import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; 
+import java.util.ArrayList; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.LongConsumer; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithStartedPrimary; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.is; public class ShardStateActionTests extends ESTestCase { private static ThreadPool THREAD_POOL; - private ShardStateAction shardStateAction; + private TestShardStateAction shardStateAction; private CapturingTransport transport; private TransportService transportService; private TestClusterService clusterService; + private static class TestShardStateAction extends ShardStateAction { + public TestShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService) { + super(settings, clusterService, transportService, allocationService, routingService); + } + + private Runnable onBeforeWaitForNewMasterAndRetry; + + public void setOnBeforeWaitForNewMasterAndRetry(Runnable onBeforeWaitForNewMasterAndRetry) { + this.onBeforeWaitForNewMasterAndRetry = onBeforeWaitForNewMasterAndRetry; + } + + private Runnable onAfterWaitForNewMasterAndRetry; + + public void setOnAfterWaitForNewMasterAndRetry(Runnable onAfterWaitForNewMasterAndRetry) { + this.onAfterWaitForNewMasterAndRetry = onAfterWaitForNewMasterAndRetry; + } + + @Override + protected void waitForNewMasterAndRetry(String actionName, ClusterStateObserver observer, ShardRoutingEntry shardRoutingEntry, Listener listener) { + onBeforeWaitForNewMasterAndRetry.run(); + super.waitForNewMasterAndRetry(actionName, observer, shardRoutingEntry, listener); + 
onAfterWaitForNewMasterAndRetry.run(); + } + } + @BeforeClass public static void startThreadPool() { THREAD_POOL = new ThreadPool("ShardStateActionTest"); @@ -68,7 +108,9 @@ public class ShardStateActionTests extends ESTestCase { clusterService = new TestClusterService(THREAD_POOL); transportService = new TransportService(transport, THREAD_POOL); transportService.start(); - shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); + shardStateAction = new TestShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); + shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> {}); + shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> {}); } @Override @@ -84,36 +126,144 @@ public class ShardStateActionTests extends ESTestCase { THREAD_POOL = null; } - public void testNoMaster() { + public void testSuccess() throws InterruptedException { final String index = "test"; clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); - DiscoveryNodes.Builder builder = DiscoveryNodes.builder(clusterService.state().nodes()); - builder.masterNodeId(null); - clusterService.setState(ClusterState.builder(clusterService.state()).nodes(builder)); - String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); - AtomicBoolean noMaster = new AtomicBoolean(); - assert !noMaster.get(); + AtomicBoolean success = new AtomicBoolean(); + CountDownLatch latch = new CountDownLatch(1); - shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + ShardRouting shardRouting = getRandomShardRouting(index); + shardStateAction.shardFailed(shardRouting, indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override - public void onShardFailedNoMaster() { - noMaster.set(true); + public void onSuccess() { + success.set(true); + latch.countDown(); } @Override - public void 
onShardFailedFailure(DiscoveryNode master, TransportException e) { - + public void onFailure(Throwable t) { + success.set(false); + latch.countDown(); + assert false; } }); - assertTrue(noMaster.get()); + CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertEquals(1, capturedRequests.length); + // the request is a shard failed request + assertThat(capturedRequests[0].request, is(instanceOf(ShardStateAction.ShardRoutingEntry.class))); + ShardStateAction.ShardRoutingEntry shardRoutingEntry = (ShardStateAction.ShardRoutingEntry)capturedRequests[0].request; + // for the right shard + assertEquals(shardRouting, shardRoutingEntry.getShardRouting()); + // sent to the master + assertEquals(clusterService.state().nodes().masterNode().getId(), capturedRequests[0].node.getId()); + + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + + latch.await(); + assertTrue(success.get()); } - public void testFailure() { + public void testNoMaster() throws InterruptedException { + final String index = "test"; + + clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); + + DiscoveryNodes.Builder noMasterBuilder = DiscoveryNodes.builder(clusterService.state().nodes()); + noMasterBuilder.masterNodeId(null); + clusterService.setState(ClusterState.builder(clusterService.state()).nodes(noMasterBuilder)); + + String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); + + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger retries = new AtomicInteger(); + AtomicBoolean success = new AtomicBoolean(); + + setUpMasterRetryVerification(1, retries, latch, requestId -> {}); + + shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable e) { + 
success.set(false); + latch.countDown(); + assert false; + } + }); + + latch.await(); + + assertThat(retries.get(), equalTo(1)); + assertTrue(success.get()); + } + + public void testMasterChannelException() throws InterruptedException { + final String index = "test"; + + clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); + + String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); + + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger retries = new AtomicInteger(); + AtomicBoolean success = new AtomicBoolean(); + AtomicReference throwable = new AtomicReference<>(); + + LongConsumer retryLoop = requestId -> { + if (randomBoolean()) { + transport.handleRemoteError( + requestId, + randomFrom(new NotMasterException("simulated"), new Discovery.FailedToCommitClusterStateException("simulated"))); + } else { + if (randomBoolean()) { + transport.handleLocalError(requestId, new NodeNotConnectedException(null, "simulated")); + } else { + transport.handleError(requestId, new NodeDisconnectedException(null, ShardStateAction.SHARD_FAILED_ACTION_NAME)); + } + } + }; + + final int numberOfRetries = randomIntBetween(1, 256); + setUpMasterRetryVerification(numberOfRetries, retries, latch, retryLoop); + + shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable t) { + success.set(false); + throwable.set(t); + latch.countDown(); + assert false; + } + }); + + final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertThat(capturedRequests.length, equalTo(1)); + assertFalse(success.get()); + assertThat(retries.get(), equalTo(0)); + retryLoop.accept(capturedRequests[0].requestId); + + latch.await(); + assertNull(throwable.get()); + assertThat(retries.get(), 
equalTo(numberOfRetries)); + assertTrue(success.get()); + } + + public void testUnhandledFailure() { final String index = "test"; clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); @@ -121,59 +271,28 @@ public class ShardStateActionTests extends ESTestCase { String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); AtomicBoolean failure = new AtomicBoolean(); - assert !failure.get(); - shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + shardStateAction.shardFailed(getRandomShardRouting(index), indexUUID, "test", getSimulatedFailure(), new ShardStateAction.Listener() { @Override - public void onShardFailedNoMaster() { - + public void onSuccess() { + failure.set(false); + assert false; } @Override - public void onShardFailedFailure(DiscoveryNode master, TransportException e) { + public void onFailure(Throwable t) { failure.set(true); } }); final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); assertThat(capturedRequests.length, equalTo(1)); - assert !failure.get(); - transport.handleResponse(capturedRequests[0].requestId, new TransportException("simulated")); + assertFalse(failure.get()); + transport.handleRemoteError(capturedRequests[0].requestId, new TransportException("simulated")); assertTrue(failure.get()); } - public void testTimeout() throws InterruptedException { - final String index = "test"; - - clusterService.setState(stateWithStartedPrimary(index, true, randomInt(5))); - - String indexUUID = clusterService.state().metaData().index(index).getIndexUUID(); - - AtomicBoolean progress = new AtomicBoolean(); - AtomicBoolean timedOut = new AtomicBoolean(); - - TimeValue timeout = new TimeValue(1, TimeUnit.MILLISECONDS); - CountDownLatch latch = new CountDownLatch(1); - shardStateAction.shardFailed(clusterService.state(), getRandomShardRouting(index), 
indexUUID, "test", getSimulatedFailure(), timeout, new ShardStateAction.Listener() { - @Override - public void onShardFailedFailure(DiscoveryNode master, TransportException e) { - if (e instanceof ReceiveTimeoutTransportException) { - assertFalse(progress.get()); - timedOut.set(true); - } - latch.countDown(); - } - }); - - latch.await(); - progress.set(true); - assertTrue(timedOut.get()); - - final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); - assertThat(capturedRequests.length, equalTo(1)); - } - private ShardRouting getRandomShardRouting(String index) { IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index); ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt(); @@ -182,6 +301,34 @@ public class ShardStateActionTests extends ESTestCase { return shardRouting; } + private void setUpMasterRetryVerification(int numberOfRetries, AtomicInteger retries, CountDownLatch latch, LongConsumer retryLoop) { + shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> { + DiscoveryNodes.Builder masterBuilder = DiscoveryNodes.builder(clusterService.state().nodes()); + masterBuilder.masterNodeId(clusterService.state().nodes().masterNodes().iterator().next().value.id()); + clusterService.setState(ClusterState.builder(clusterService.state()).nodes(masterBuilder)); + }); + + shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> verifyRetry(numberOfRetries, retries, latch, retryLoop)); + } + + private void verifyRetry(int numberOfRetries, AtomicInteger retries, CountDownLatch latch, LongConsumer retryLoop) { + // assert a retry request was sent + final CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + if (capturedRequests.length == 1) { + retries.incrementAndGet(); + if (retries.get() == numberOfRetries) { + // finish the request + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + } 
else { + retryLoop.accept(capturedRequests[0].requestId); + } + } else { + // there failed to be a retry request + // release the driver thread to fail the test + latch.countDown(); + } + } + private Throwable getSimulatedFailure() { return new CorruptIndexException("simulated", (String) null); } diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java index 1e9c25ed78c..cf948366f6a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java @@ -108,7 +108,7 @@ public class AwarenessAllocationIT extends ESIntegTestCase { .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_FORCE_GROUP_SETTING.getKey() + "zone.values", "a,b") .put(AwarenessAllocationDecider.CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), "zone") .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 3) - .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "10s") + .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "10s") .build(); logger.info("--> starting 4 nodes on different zones"); diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 6b406a3bfdf..793cb0ce421 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RerouteExplanation; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; -import org.elasticsearch.cluster.routing.allocation.command.AllocateAllocationCommand; 
+import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; @@ -100,7 +100,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, *under dry_run*"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .setDryRun(true) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); @@ -113,7 +113,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, actually allocating, no dry run"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -162,8 +162,8 @@ public class ClusterRerouteIT extends ESIntegTestCase { public void testDelayWithALargeAmountOfShards() throws Exception { Settings commonSettings = settingsBuilder() - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 1) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 1) + 
.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 1) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 1) .build(); logger.info("--> starting 4 nodes"); String node_1 = internalCluster().startNode(commonSettings); @@ -212,7 +212,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate shard 1, actually allocating, no dry run"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); @@ -246,7 +246,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { logger.info("--> explicitly allocate primary"); state = client().admin().cluster().prepareReroute() .setExplain(randomBoolean()) - .add(new AllocateAllocationCommand(new ShardId("test", 0), node_1, true)) + .add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node_1, true)) .execute().actionGet().getState(); assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).id()).get(0).state(), equalTo(ShardRoutingState.INITIALIZING)); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java new file mode 100644 index 00000000000..32312f34e21 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or 
more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +public class AutoExpandReplicasTests extends ESTestCase { + + public void testParseSettings() { + AutoExpandReplicas autoExpandReplicas = AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "0-5").build()); + assertEquals(0, autoExpandReplicas.getMinReplicas()); + assertEquals(5, autoExpandReplicas.getMaxReplicas(8)); + assertEquals(2, autoExpandReplicas.getMaxReplicas(3)); + + autoExpandReplicas = AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "0-all").build()); + assertEquals(0, autoExpandReplicas.getMinReplicas()); + assertEquals(5, autoExpandReplicas.getMaxReplicas(6)); + assertEquals(2, autoExpandReplicas.getMaxReplicas(3)); + + autoExpandReplicas = AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "1-all").build()); + assertEquals(1, autoExpandReplicas.getMinReplicas()); + assertEquals(5, autoExpandReplicas.getMaxReplicas(6)); + assertEquals(2, autoExpandReplicas.getMaxReplicas(3)); + + } + + public void testInvalidValues() { + try { + 
AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "boom").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse [index.auto_expand_replicas] from value: [boom] at index -1", ex.getMessage()); + } + + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "1-boom").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse [index.auto_expand_replicas] from value: [1-boom] at index 1", ex.getMessage()); + assertEquals("For input string: \"boom\"", ex.getCause().getMessage()); + } + + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "boom-1").build()); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("failed to parse [index.auto_expand_replicas] from value: [boom-1] at index 4", ex.getMessage()); + assertEquals("For input string: \"boom\"", ex.getCause().getMessage()); + } + + try { + AutoExpandReplicas.SETTING.get(Settings.builder().put("index.auto_expand_replicas", "2-1").build()); + } catch (IllegalArgumentException ex) { + assertEquals("[index.auto_expand_replicas] minReplicas must be =< maxReplicas but wasn't 2 > 1", ex.getMessage()); + } + + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java new file mode 100644 index 00000000000..a43da9e53fa --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; + +import java.util.Collections; + +public class MetaDataIndexUpgradeServiceTests extends ESTestCase { + + public void testArchiveBrokenIndexSettings() { + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + IndexMetaData src = newIndexMeta("foo", Settings.EMPTY); + IndexMetaData indexMetaData = service.archiveBrokenIndexSettings(src); + assertSame(indexMetaData, src); + + src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); + indexMetaData = service.archiveBrokenIndexSettings(src); + assertNotSame(indexMetaData, src); + assertEquals("-200", indexMetaData.getSettings().get("archived.index.refresh_interval")); + + src = newIndexMeta("foo", Settings.builder().put("index.codec", "best_compression1").build()); + indexMetaData = service.archiveBrokenIndexSettings(src); + assertNotSame(indexMetaData, src); + assertEquals("best_compression1", 
indexMetaData.getSettings().get("archived.index.codec")); + + src = newIndexMeta("foo", Settings.builder().put("index.refresh.interval", "-1").build()); + indexMetaData = service.archiveBrokenIndexSettings(src); + assertNotSame(indexMetaData, src); + assertEquals("-1", indexMetaData.getSettings().get("archived.index.refresh.interval")); + + src = newIndexMeta("foo", indexMetaData.getSettings()); // double archive? + indexMetaData = service.archiveBrokenIndexSettings(src); + assertSame(indexMetaData, src); + } + + public void testUpgrade() { + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); + assertFalse(service.isUpgraded(src)); + src = service.upgradeIndexMetaData(src); + assertTrue(service.isUpgraded(src)); + assertEquals("-200", src.getSettings().get("archived.index.refresh_interval")); + assertNull(src.getSettings().get("index.refresh_interval")); + assertSame(src, service.upgradeIndexMetaData(src)); // no double upgrade + } + + public void testIsUpgraded() { + MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); + assertFalse(service.isUpgraded(src)); + Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion()); + src = newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_VERSION_UPGRADED, version).build()); + assertFalse(service.isUpgraded(src)); + src = newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT).build()); + 
assertTrue(service.isUpgraded(src)); + } + + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_CREATION_DATE, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_0_18_1_ID) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java index 2d704380ae0..169e68f4637 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/DelayedAllocationIT.java @@ -47,7 +47,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0)).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0)).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -66,7 +66,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get(); ensureGreen("test"); 
indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -90,14 +90,14 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); ensureGreen("test"); internalCluster().startNode(); // do a second round with longer delay to make sure it happens - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get()); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); ensureGreen("test"); } @@ -112,7 +112,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -123,7 +123,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { } }); 
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1)); - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(100))).get()); ensureGreen("test"); assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0)); } @@ -138,7 +138,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get(); + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1))).get(); ensureGreen("test"); indexRandomData(); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard())); @@ -149,7 +149,7 @@ public class DelayedAllocationIT extends ESIntegTestCase { } }); assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1)); - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(0))).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMillis(0))).get()); ensureGreen("test"); assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0)); } diff --git 
a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index dc468ded962..9203b270b2f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -19,10 +19,17 @@ package org.elasticsearch.cluster.routing; * under the License. */ +import com.carrotsearch.hppc.cursors.IntObjectCursor; +import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder; +import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.disruption.NetworkDisconnectPartition; @@ -33,11 +40,13 @@ import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, 
numDataNodes = 0) @@ -50,7 +59,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase { return pluginList(MockTransportService.TestPlugin.class); } - public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { + private void createStaleReplicaScenario() throws Exception { logger.info("--> starting 3 nodes, 1 master, 2 data"); String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); internalCluster().startDataOnlyNodesAsync(2).get(); @@ -103,6 +112,10 @@ public class PrimaryAllocationIT extends ESIntegTestCase { assertBusy(() -> assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0))); // kick reroute a second time and check that all shards are unassigned assertThat(client(master).admin().cluster().prepareReroute().get().getState().getRoutingNodes().unassigned().size(), equalTo(2)); + } + + public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { + createStaleReplicaScenario(); logger.info("--> starting node that reuses data folder with the up-to-date primary shard"); internalCluster().startDataOnlyNode(Settings.EMPTY); @@ -112,6 +125,67 @@ public class PrimaryAllocationIT extends ESIntegTestCase { assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get(), 2l); } + public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exception { + String dataNodeWithShardCopy = internalCluster().startNode(); + + logger.info("--> create single shard index"); + assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get()); + ensureGreen("test"); + + String dataNodeWithNoShardCopy = internalCluster().startNode(); + ensureStableCluster(2); + + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(dataNodeWithShardCopy)); + ensureStableCluster(1); + 
assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT)); + + logger.info("--> force allocation of stale copy to node that does not have shard copy"); + client().admin().cluster().prepareReroute().add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", 0), dataNodeWithNoShardCopy, true)).get(); + + logger.info("--> wait until shard is failed and becomes unassigned again"); + assertBusy(() -> assertTrue(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").allPrimaryShardsUnassigned())); + assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().index("test").getShards().get(0).primaryShard().unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED)); + } + + public void testForceStaleReplicaToBePromotedToPrimary() throws Exception { + boolean useStaleReplica = randomBoolean(); // if true, use stale replica, otherwise a completely empty copy + createStaleReplicaScenario(); + + logger.info("--> explicitly promote old primary shard"); + ImmutableOpenIntMap> storeStatuses = client().admin().indices().prepareShardStores("test").get().getStoreStatuses().get("test"); + ClusterRerouteRequestBuilder rerouteBuilder = client().admin().cluster().prepareReroute(); + for (IntObjectCursor> shardStoreStatuses : storeStatuses) { + int shardId = shardStoreStatuses.key; + IndicesShardStoresResponse.StoreStatus storeStatus = randomFrom(shardStoreStatuses.value); + logger.info("--> adding allocation command for shard " + shardId); + // force allocation based on node id + if (useStaleReplica) { + rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand(new ShardId("test", shardId), storeStatus.getNode().getId(), true)); + } else { + rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", shardId), 
storeStatus.getNode().getId(), true)); + } + } + rerouteBuilder.get(); + + logger.info("--> check that the stale primary shard gets allocated and that documents are available"); + ensureYellow("test"); + + assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(), useStaleReplica ? 1l : 0l); + } + + public void testForcePrimaryShardIfAllocationDecidersSayNoAfterIndexCreation() throws ExecutionException, InterruptedException { + String node = internalCluster().startNode(); + client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + .put("index.routing.allocation.exclude._name", node) + .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get(); + + assertThat(client().admin().cluster().prepareState().get().getState().getRoutingTable().shardRoutingTable("test", 0).assignedShards(), empty()); + + client().admin().cluster().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), node, true)).get(); + ensureGreen("test"); + } + public void testNotWaitForQuorumCopies() throws Exception { logger.info("--> starting 3 nodes"); internalCluster().startNodesAsync(3).get(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java index 1711b0c33a8..e50272d2b08 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java @@ -69,7 +69,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { public void testNoDelayedUnassigned() throws Exception { AllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0")) + 
.put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0")) .numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -97,7 +97,7 @@ public class RoutingServiceTests extends ESAllocationTestCase { public void testDelayedUnassignedScheduleReroute() throws Exception { MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms")) + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")) .numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) @@ -144,9 +144,9 @@ public class RoutingServiceTests extends ESAllocationTestCase { try { MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms")) + .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms")) .numberOfShards(1).numberOfReplicas(1)) - .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10s")) + .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10s")) .numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = 
ClusterState.builder(ClusterName.DEFAULT).metaData(metaData) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 20a731b0153..e277080ebf7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -259,7 +259,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { public void testUnassignedDelayedOnlyOnNodeLeft() throws Exception { final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, null); long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY); long cachedDelay = unassignedInfo.getLastComputedLeftDelayNanos(); assertThat(delay, equalTo(cachedDelay)); assertThat(delay, equalTo(TimeValue.timeValueHours(10).nanos() - 1)); @@ -273,7 +273,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { reasons.remove(UnassignedInfo.Reason.NODE_LEFT); UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null); long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10h").build(), Settings.EMPTY); assertThat(delay, equalTo(0l)); delay = unassignedInfo.getLastComputedLeftDelayNanos(); assertThat(delay, equalTo(0l)); @@ -286,7 +286,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase { 
final long baseTime = System.nanoTime(); final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, baseTime, System.currentTimeMillis()); final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos(); - final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueNanos(totalDelayNanos)).build(); + final Settings settings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build(); long delay = unassignedInfo.updateDelay(baseTime, settings, Settings.EMPTY); assertThat(delay, equalTo(totalDelayNanos)); assertThat(delay, equalTo(unassignedInfo.getLastComputedLeftDelayNanos())); @@ -336,8 +336,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase { final long expectMinDelaySettingsNanos = Math.min(delayTest1.nanos(), delayTest2.nanos()); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, delayTest1)).numberOfShards(1).numberOfReplicas(1)) - .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, delayTest2)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest1)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayTest2)).numberOfShards(1).numberOfReplicas(1)) .build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .metaData(metaData) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 4c4fa72a6ec..65c2e577015 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -26,7 +26,10 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.cluster.routing.allocation.command.AllocateAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AbstractAllocateAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; +import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; @@ -38,7 +41,9 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.test.ESAllocationTestCase; import static java.util.Collections.singletonMap; @@ -46,6 +51,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static 
org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** @@ -96,6 +102,14 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node(toNodeId).get(0).state(), equalTo(ShardRoutingState.STARTED)); } + private AbstractAllocateAllocationCommand randomAllocateCommand(ShardId shardId, String node) { + return randomFrom( + new AllocateReplicaAllocationCommand(shardId, node), + new AllocateEmptyPrimaryAllocationCommand(shardId, node, true), + new AllocateStalePrimaryAllocationCommand(shardId, node, true) + ); + } + public void testAllocateCommand() { AllocationService allocation = createAllocationService(settingsBuilder() .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none") @@ -106,10 +120,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); + // shard routing is added as "from recovery" instead of "new index creation" so that we can test below that allocating an empty + // primary with accept_data_loss flag set to false fails RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) + .addAsRecovery(metaData.index("test")) .build(); ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.DEFAULT).metaData(metaData).routingTable(routingTable).build(); + ShardId shardId = new ShardId("test", 0); logger.info("--> adding 3 nodes on same rack and do rerouting"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() @@ -122,22 +139,56 @@ public class AllocationCommandsTests extends ESAllocationTestCase { clusterState = 
ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); - logger.info("--> allocating with primary flag set to false, should fail"); + logger.info("--> allocating to non-existent node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", false))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node42"))); + fail("expected IllegalArgumentException when allocating to non-existing node"); } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("failed to resolve [node42], no matching nodes")); } logger.info("--> allocating to non-data node, should fail"); try { - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node4", true))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(shardId, "node4"))); + fail("expected IllegalArgumentException when allocating to non-data node"); } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("allocation can only be done on data nodes")); } - logger.info("--> allocating with primary flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", true))); + logger.info("--> allocating non-existing shard, should fail"); + try { + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 1), "node2"))); + fail("expected ShardNotFoundException when allocating non-existing shard"); + } catch (ShardNotFoundException e) { + assertThat(e.getMessage(), containsString("no such shard")); + } + + logger.info("--> allocating non-existing index, should fail"); + try { + 
allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test2", 0), "node2"))); + fail("expected ShardNotFoundException when allocating non-existing index"); + } catch (IndexNotFoundException e) { + assertThat(e.getMessage(), containsString("no such index")); + } + + logger.info("--> allocating empty primary with acceptDataLoss flag set to false"); + try { + allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", false))); + fail("expected IllegalArgumentException when allocating empty primary with acceptDataLoss flag set to false"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. Please confirm by setting the accept_data_loss parameter to true")); + } + + logger.info("--> allocating stale primary with acceptDataLoss flag set to false"); + try { + allocation.reroute(clusterState, new AllocationCommands(new AllocateStalePrimaryAllocationCommand(shardId, "node1", false))); + fail("expected IllegalArgumentException when allocating stale primary with acceptDataLoss flag set to false"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("allocating an empty primary for " + shardId + " can result in data loss. 
Please confirm by setting the accept_data_loss parameter to true")); + } + + logger.info("--> allocating empty primary with acceptDataLoss flag set to true"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -153,13 +204,13 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> allocate the replica shard on the primary shard node, should fail"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", false))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node1"))); + fail("expected IllegalArgumentException when allocating replica shard on the primary shard node"); } catch (IllegalArgumentException e) { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -178,8 +229,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { logger.info("--> verify that we fail when there are no unassigned shards"); try { - allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new 
ShardId("test", 0), "node3", false))); - fail(); + allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand(new ShardId("test", 0), "node3"))); + fail("expected IllegalArgumentException when allocating shard while no unassigned shard available"); } catch (IllegalArgumentException e) { } } @@ -209,8 +260,8 @@ public class AllocationCommandsTests extends ESAllocationTestCase { clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(0)); - logger.info("--> allocating with primary flag set to true"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node1", true))); + logger.info("--> allocating empty primary shard with accept_data_loss flag set to true"); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 0), "node1", true))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -239,7 +290,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { } logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -257,7 +308,7 @@ public 
class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); assertThat(rerouteResult.changed(), equalTo(true)); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -290,7 +341,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { assertThat(clusterState.getRoutingNodes().node("node3").size(), equalTo(0)); logger.info("--> allocate the replica shard on on the second node"); - rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateAllocationCommand(new ShardId("test", 0), "node2", false))); + rerouteResult = allocation.reroute(clusterState, new AllocationCommands(new AllocateReplicaAllocationCommand(new ShardId("test", 0), "node2"))); clusterState = ClusterState.builder(clusterState).routingTable(rerouteResult.routingTable()).build(); assertThat(rerouteResult.changed(), equalTo(true)); assertThat(clusterState.getRoutingNodes().node("node1").size(), equalTo(1)); @@ -335,7 +386,9 @@ public class AllocationCommandsTests extends ESAllocationTestCase { public void testSerialization() throws Exception { AllocationCommands commands = new AllocationCommands( - new AllocateAllocationCommand(new ShardId("test", 1), "node1", true), + new AllocateEmptyPrimaryAllocationCommand(new ShardId("test", 1), "node1", true), + new AllocateStalePrimaryAllocationCommand(new ShardId("test", 2), "node1", true), + new AllocateReplicaAllocationCommand(new ShardId("test", 2), "node1"), 
new MoveAllocationCommand(new ShardId("test", 3), "node2", "node3"), new CancelAllocationCommand(new ShardId("test", 4), "node5", true) ); @@ -343,24 +396,33 @@ public class AllocationCommandsTests extends ESAllocationTestCase { AllocationCommands.writeTo(commands, bytes); AllocationCommands sCommands = AllocationCommands.readFrom(StreamInput.wrap(bytes.bytes())); - assertThat(sCommands.commands().size(), equalTo(3)); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).allowPrimary(), equalTo(true)); + assertThat(sCommands.commands().size(), equalTo(5)); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true)); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 3))); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).fromNode(), equalTo("node2")); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).toNode(), equalTo("node3")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true)); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 4))); - assertThat(((CancelAllocationCommand) 
(sCommands.commands().get(2))).node(), equalTo("node5")); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).allowPrimary(), equalTo(true)); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node1")); + + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3))); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2")); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3")); + + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4))); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5")); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true)); } public void testXContent() throws Exception { String commands = "{\n" + " \"commands\" : [\n" + - " {\"allocate\" : {\"index\" : \"test\", \"shard\" : 1, \"node\" : \"node1\", \"allow_primary\" : true}}\n" + + " {\"allocate_empty_primary\" : {\"index\" : \"test\", \"shard\" : 1, \"node\" : \"node1\", \"accept_data_loss\" : true}}\n" + + " ,{\"allocate_stale_primary\" : {\"index\" : \"test\", \"shard\" : 2, \"node\" : \"node1\", \"accept_data_loss\" : true}}\n" + + " ,{\"allocate_replica\" : {\"index\" : \"test\", \"shard\" : 2, \"node\" : \"node1\"}}\n" + " ,{\"move\" : {\"index\" : \"test\", \"shard\" : 3, \"from_node\" : \"node2\", \"to_node\" : \"node3\"}} \n" + " ,{\"cancel\" : {\"index\" : \"test\", \"shard\" : 4, \"node\" : \"node5\", \"allow_primary\" : true}} \n" + " ]\n" + @@ -371,17 +433,24 @@ public class AllocationCommandsTests extends ESAllocationTestCase { parser.nextToken(); AllocationCommands sCommands = 
AllocationCommands.fromXContent(parser); - assertThat(sCommands.commands().size(), equalTo(3)); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); - assertThat(((AllocateAllocationCommand) (sCommands.commands().get(0))).allowPrimary(), equalTo(true)); + assertThat(sCommands.commands().size(), equalTo(5)); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).shardId(), equalTo(new ShardId("test", 1))); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).node(), equalTo("node1")); + assertThat(((AllocateEmptyPrimaryAllocationCommand) (sCommands.commands().get(0))).acceptDataLoss(), equalTo(true)); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 3))); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).fromNode(), equalTo("node2")); - assertThat(((MoveAllocationCommand) (sCommands.commands().get(1))).toNode(), equalTo("node3")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).node(), equalTo("node1")); + assertThat(((AllocateStalePrimaryAllocationCommand) (sCommands.commands().get(1))).acceptDataLoss(), equalTo(true)); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 4))); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).node(), equalTo("node5")); - assertThat(((CancelAllocationCommand) (sCommands.commands().get(2))).allowPrimary(), equalTo(true)); + assertThat(((AllocateReplicaAllocationCommand) (sCommands.commands().get(2))).shardId(), equalTo(new ShardId("test", 2))); + assertThat(((AllocateReplicaAllocationCommand) 
(sCommands.commands().get(2))).node(), equalTo("node1")); + + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).shardId(), equalTo(new ShardId("test", 3))); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).fromNode(), equalTo("node2")); + assertThat(((MoveAllocationCommand) (sCommands.commands().get(3))).toNode(), equalTo("node3")); + + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).shardId(), equalTo(new ShardId("test", 4))); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).node(), equalTo("node5")); + assertThat(((CancelAllocationCommand) (sCommands.commands().get(4))).allowPrimary(), equalTo(true)); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index fa9f4065dc1..827da901dc9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -753,7 +753,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) - .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_id", "node1,node2")).numberOfShards(2).numberOfReplicas(0)) + .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_id", "node1,node2")).numberOfShards(2).numberOfReplicas(0)) .build(); // we use a second index here (test1) that never gets assigned otherwise allocateUnassinged is never called if we don't have unassigned shards. 
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index dd3f3f373ff..ac539c421b0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -54,7 +54,7 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 4) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 2))) + .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 2))) .build(); RoutingTable routingTable = RoutingTable.builder() @@ -212,12 +212,12 @@ public class ShardsLimitAllocationTests extends ESAllocationTestCase { assertThat(shardRouting.index(), equalTo("test1")); } - logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE + " for test, see that things move"); + logger.info("update " + ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey() + " for test, see that things move"); metaData = MetaData.builder(metaData) .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 5) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 3) + .put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 3) )) .build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java index be64aafc61e..ae840abd1a8 100644 --- 
a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDeciderIT.java @@ -49,14 +49,14 @@ public class EnableAllocationDeciderIT extends ESIntegTestCase { final String secondNode = internalCluster().startNode(); // prevent via index setting but only on index test - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE)).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE)).get(); client().admin().cluster().prepareReroute().get(); ensureGreen(); assertAllShardsOnNodes("test", firstNode); assertAllShardsOnNodes("test_1", firstNode); // now enable the index test to relocate since index settings override cluster settings - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? 
EnableAllocationDecider.Rebalance.PRIMARIES : EnableAllocationDecider.Rebalance.ALL)).get(); logger.info("--> balance index [test]"); client().admin().cluster().prepareReroute().get(); ensureGreen("test"); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index b2559c29ed2..62005a3fa1d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -44,7 +44,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING; -import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.hamcrest.Matchers.equalTo; @@ -121,7 +121,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("disabled").settings(settings(Version.CURRENT) - .put(INDEX_ROUTING_ALLOCATION_ENABLE, Allocation.NONE.name())) + .put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), Allocation.NONE.name())) .numberOfShards(1).numberOfReplicas(1)) .put(IndexMetaData.builder("enabled").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); @@ 
-163,12 +163,12 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); - Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); + Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build(); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(indexSettings)).numberOfShards(3).numberOfReplicas(1)) - .put(IndexMetaData.builder("always_disabled").settings(settings(Version.CURRENT).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE)).numberOfShards(1).numberOfReplicas(1)) + .put(IndexMetaData.builder("always_disabled").settings(settings(Version.CURRENT).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE)).numberOfShards(1).numberOfReplicas(1)) .build(); RoutingTable routingTable = RoutingTable.builder() @@ -219,7 +219,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { IndexMetaData meta = clusterState.getMetaData().index("test"); IndexMetaData meta1 = clusterState.getMetaData().index("always_disabled"); clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices().put(IndexMetaData.builder(meta1)) - .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, allowedOnes).build()))) + 
.put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), allowedOnes).build()))) .build(); } @@ -265,7 +265,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { .build(); ClusterSettings clusterSettings = new ClusterSettings(build, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); AllocationService strategy = createAllocationService(build, clusterSettings, getRandom()); - Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, Rebalance.NONE).build(); + Settings indexSettings = useClusterSetting ? Settings.EMPTY : settingsBuilder().put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE).build(); logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() @@ -312,7 +312,7 @@ public class EnableAllocationTests extends ESAllocationTestCase { prevState = clusterState; IndexMetaData meta = clusterState.getMetaData().index("test"); clusterState = ClusterState.builder(clusterState).metaData(MetaData.builder(metaData).removeAllIndices() - .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, randomBoolean() ? Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); + .put(IndexMetaData.builder(meta).settings(settingsBuilder().put(meta.getSettings()).put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), randomBoolean() ? 
Rebalance.PRIMARIES : Rebalance.ALL).build()))).build(); } clusterSettings.applySettings(clusterState.metaData().settings()); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index f2a9a641a80..80df54518ba 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -265,7 +265,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { .get(); fail("bogus value"); } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.commit_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); + assertEquals(ex.getMessage(), "Failed to parse setting [discovery.zen.publish_timeout] with value [whatever] as a time value: unit is missing or unrecognized"); } assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1l)); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index 6a16f906886..97e527d1a0a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -23,8 +23,10 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import 
org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -62,6 +64,10 @@ public class SettingsFilteringIT extends ESIntegTestCase { return "Settings Filtering Plugin"; } + public void onModule(SettingsModule module) { + module.registerSetting(Setting.groupSetting("index.filter_test.", false, Setting.Scope.INDEX)); + } + @Override public Collection nodeModules() { return Collections.singletonList(new SettingsFilteringModule()); diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsValidatorTests.java b/core/src/test/java/org/elasticsearch/cluster/settings/SettingsValidatorTests.java deleted file mode 100644 index 498acef2eb9..00000000000 --- a/core/src/test/java/org/elasticsearch/cluster/settings/SettingsValidatorTests.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.cluster.settings; - -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class SettingsValidatorTests extends ESTestCase { - public void testValidators() throws Exception { - assertThat(Validator.EMPTY.validate("", "anything goes", null), nullValue()); - - assertThat(Validator.TIME.validate("", "10m", null), nullValue()); - assertThat(Validator.TIME.validate("", "10g", null), notNullValue()); - assertThat(Validator.TIME.validate("", "bad timing", null), notNullValue()); - - assertThat(Validator.BYTES_SIZE.validate("", "10m", null), nullValue()); - assertThat(Validator.BYTES_SIZE.validate("", "10g", null), nullValue()); - assertThat(Validator.BYTES_SIZE.validate("", "bad", null), notNullValue()); - - assertThat(Validator.FLOAT.validate("", "10.2", null), nullValue()); - assertThat(Validator.FLOAT.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "10.2", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "0.0", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "-1.0", null), notNullValue()); - assertThat(Validator.NON_NEGATIVE_FLOAT.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.DOUBLE.validate("", "10.2", null), nullValue()); - assertThat(Validator.DOUBLE.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.DOUBLE_GTE_2.validate("", "10.2", null), nullValue()); - assertThat(Validator.DOUBLE_GTE_2.validate("", "2.0", null), nullValue()); - assertThat(Validator.DOUBLE_GTE_2.validate("", "1.0", null), notNullValue()); - assertThat(Validator.DOUBLE_GTE_2.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "10.2", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", 
"0.0", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "-1.0", null), notNullValue()); - assertThat(Validator.NON_NEGATIVE_DOUBLE.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.INTEGER.validate("", "10", null), nullValue()); - assertThat(Validator.INTEGER.validate("", "10.2", null), notNullValue()); - - assertThat(Validator.INTEGER_GTE_2.validate("", "2", null), nullValue()); - assertThat(Validator.INTEGER_GTE_2.validate("", "1", null), notNullValue()); - assertThat(Validator.INTEGER_GTE_2.validate("", "0", null), notNullValue()); - assertThat(Validator.INTEGER_GTE_2.validate("", "10.2.3", null), notNullValue()); - - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "2", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "1", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "0", null), nullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "-1", null), notNullValue()); - assertThat(Validator.NON_NEGATIVE_INTEGER.validate("", "10.2", null), notNullValue()); - - assertThat(Validator.POSITIVE_INTEGER.validate("", "2", null), nullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "1", null), nullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "0", null), notNullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "-1", null), notNullValue()); - assertThat(Validator.POSITIVE_INTEGER.validate("", "10.2", null), notNullValue()); - - assertThat(Validator.PERCENTAGE.validate("", "asdasd", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "-1", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "20", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "-1%", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "101%", null), notNullValue()); - assertThat(Validator.PERCENTAGE.validate("", "100%", null), nullValue()); - 
assertThat(Validator.PERCENTAGE.validate("", "99%", null), nullValue()); - assertThat(Validator.PERCENTAGE.validate("", "0%", null), nullValue()); - - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "asdasd", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "20", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "20mb", null), nullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "-1%", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "101%", null), notNullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "100%", null), nullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "99%", null), nullValue()); - assertThat(Validator.BYTES_SIZE_OR_PERCENTAGE.validate("", "0%", null), nullValue()); - } - - public void testDynamicValidators() throws Exception { - DynamicSettings.Builder ds = new DynamicSettings.Builder(); - ds.addSetting("my.test.*", Validator.POSITIVE_INTEGER); - String valid = ds.build().validateDynamicSetting("my.test.setting", "-1", null); - assertThat(valid, equalTo("the value of the setting my.test.setting must be a positive integer")); - } -} diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java index e5d27b872fb..ca33288e874 100644 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java @@ -28,16 +28,24 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; 
import org.junit.Assert; import java.io.IOException; +import java.util.Collection; import static org.hamcrest.Matchers.containsString; public class CodecTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testAcceptPostingsFormat() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").field("postings_format", Codec.getDefault().postingsFormat().getName()).endObject().endObject() diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java index 8f8e8b4ea1c..b10763a95b0 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java @@ -34,6 +34,7 @@ import static org.hamcrest.Matchers.lessThan; * Basic Tests for {@link GeoDistance} */ public class GeoDistanceTests extends ESTestCase { + public void testGeoDistanceSerialization() throws IOException { // make sure that ordinals don't change, because we rely on then in serialization assertThat(GeoDistance.PLANE.ordinal(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 03294b8e49c..279e31aadd4 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; public abstract class AbstractShapeBuilderTestCase extends ESTestCase { - private static final int NUMBER_OF_TESTBUILDERS = 100; + private static final int NUMBER_OF_TESTBUILDERS = 20; private static 
NamedWriteableRegistry namedWriteableRegistry; /** diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java index 8f9c9009071..ed8a5cffbf4 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java @@ -27,6 +27,7 @@ import org.apache.log4j.spi.LoggingEvent; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -53,8 +54,8 @@ public class Log4jESLoggerTests extends ESTestCase { Path configDir = getDataPath("config"); // Need to set custom path.conf so we can use a custom logging.yml file for the test Settings settings = Settings.builder() - .put("path.conf", configDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); LogConfigurator.configure(settings, true); diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java index 2a08dd1e55c..5d90edaf7a5 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java @@ -54,8 +54,8 @@ public class LoggingConfigurationTests extends ESTestCase { try { Path configDir = getDataPath("config"); Settings settings = Settings.builder() - .put("path.conf", configDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + 
.put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); LogConfigurator.configure(settings, true); @@ -84,8 +84,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf, "{\"json\": \"foo\"}".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -101,8 +101,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf, "key: value".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -120,8 +120,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf2, "yaml: bar".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -138,8 +138,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(invalidSuffix, "yml: bar".getBytes(StandardCharsets.UTF_8)); Environment environment = new Environment( Settings.builder() - .put("path.conf", 
invalidSuffix.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), invalidSuffix.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build()); Settings.Builder builder = Settings.builder(); @@ -157,8 +157,8 @@ public class LoggingConfigurationTests extends ESTestCase { Files.write(loggingConf, "appender.file.type: file\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND); Environment environment = InternalSettingsPreparer.prepareEnvironment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("logger.test_resolve_order", "TRACE, console") .put("appender.console.type", "console") .put("appender.console.layout.type", "consolePattern") @@ -186,8 +186,8 @@ public class LoggingConfigurationTests extends ESTestCase { StandardCharsets.UTF_8); Environment environment = InternalSettingsPreparer.prepareEnvironment( Settings.builder() - .put("path.conf", tmpDir.toAbsolutePath()) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(), new CliToolTestCase.MockTerminal()); LogConfigurator.configure(environment.settings(), false); ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_config_not_read"); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java index 61660f96742..0c14e1a0bcb 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java @@ -22,8 +22,10 @@ package 
org.elasticsearch.common.lucene; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.index.shard.ShardId; @@ -55,6 +57,25 @@ public class ShardCoreKeyMapTests extends ESTestCase { } } + public void testAddingAClosedReader() throws Exception { + LeafReader reader; + try (Directory dir = newDirectory(); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) { + writer.addDocument(new Document()); + try (DirectoryReader dirReader = ElasticsearchDirectoryReader.wrap(writer.getReader(), new ShardId("index1", 1))) { + reader = dirReader.leaves().get(0).reader(); + } + } + ShardCoreKeyMap map = new ShardCoreKeyMap(); + try { + map.add(reader); + fail("Expected AlreadyClosedException"); + } catch (AlreadyClosedException e) { + // What we wanted + } + assertEquals(0, map.size()); + } + public void testBasics() throws IOException { Directory dir1 = newDirectory(); RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 798e82a979e..b8a21e1b678 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -104,36 +104,36 @@ public class NetworkModuleTests extends ModuleTestCase { public void testRegisterTransportService() { Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom").build(); - NetworkModule module = new 
NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); // check it works with transport only as well - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); } public void testRegisterTransport() { Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); // check it works with transport only as well - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); } public void testRegisterHttpTransport() { Settings settings = Settings.builder().put(NetworkModule.HTTP_TYPE_KEY, "custom").build(); - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerHttpTransport("custom", FakeHttpTransport.class); assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class); // check registration not allowed for transport only - module = new NetworkModule(new NetworkService(settings), 
settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); try { module.registerHttpTransport("custom", FakeHttpTransport.class); fail(); @@ -143,20 +143,20 @@ public class NetworkModuleTests extends ModuleTestCase { } // not added if http is disabled - settings = Settings.builder().put(NetworkModule.HTTP_ENABLED, false).build(); - module = new NetworkModule(new NetworkService(settings), settings, false); + settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build(); + module = new NetworkModule(new NetworkService(settings), settings, false, null); assertNotBound(module, HttpServerTransport.class); } public void testRegisterRestHandler() { Settings settings = Settings.EMPTY; - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerRestHandler(FakeRestHandler.class); // also check a builtin is bound assertSetMultiBinding(module, RestHandler.class, FakeRestHandler.class, RestMainAction.class); // check registration not allowed for transport only - module = new NetworkModule(new NetworkService(settings), settings, true); + module = new NetworkModule(new NetworkService(settings), settings, true, null); try { module.registerRestHandler(FakeRestHandler.class); fail(); @@ -168,7 +168,7 @@ public class NetworkModuleTests extends ModuleTestCase { public void testRegisterCatRestHandler() { Settings settings = Settings.EMPTY; - NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false); + NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, null); module.registerRestHandler(FakeCatRestHandler.class); // also check a builtin is bound assertSetMultiBinding(module, AbstractCatAction.class, FakeCatRestHandler.class, RestNodesAction.class); diff --git 
a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 97393c51b8d..86a37723b57 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -18,8 +18,11 @@ */ package org.elasticsearch.common.settings; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportService; @@ -165,4 +168,90 @@ public class ScopedSettingsTests extends ESTestCase { assertTrue(ref.get().contains("internal:index/shard/recovery/*")); assertTrue(ref.get().contains("internal:gateway/local*")); } + + public void testGetSetting() { + IndexScopedSettings settings = new IndexScopedSettings( + Settings.EMPTY, + IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + IndexScopedSettings copy = settings.copy(Settings.builder().put("index.store.type", "boom").build(), newIndexMeta("foo", Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 3).build())); + assertEquals(3, copy.get(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING).intValue()); + assertEquals(1, copy.get(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING).intValue()); + assertEquals("boom", copy.get(IndexModule.INDEX_STORE_TYPE_SETTING)); // test fallback to node settings + } + + public void testValidate() { + IndexScopedSettings settings = new IndexScopedSettings( + Settings.EMPTY, + IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + settings.validate(Settings.builder().put("index.store.type", "boom")); + settings.validate(Settings.builder().put("index.store.type", "boom").build()); + 
try { + settings.validate(Settings.builder().put("index.store.type", "boom", "i.am.not.a.setting", true)); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("unknown setting [i.am.not.a.setting]", e.getMessage()); + } + + try { + settings.validate(Settings.builder().put("index.store.type", "boom", "i.am.not.a.setting", true).build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("unknown setting [i.am.not.a.setting]", e.getMessage()); + } + + try { + settings.validate(Settings.builder().put("index.store.type", "boom", "index.number_of_replicas", true).build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); + } + + try { + settings.validate("index.number_of_replicas", Settings.builder().put("index.number_of_replicas", "true").build()); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); + } + } + + + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } + + public void testKeyPattern() { + assertTrue(AbstractScopedSettings.isValidKey("a.b.c-b.d")); + assertTrue(AbstractScopedSettings.isValidKey("a.b.c.d")); + assertTrue(AbstractScopedSettings.isValidKey("a.b_012.c_b.d")); + assertTrue(AbstractScopedSettings.isValidKey("a")); + assertFalse(AbstractScopedSettings.isValidKey("a b")); + assertFalse(AbstractScopedSettings.isValidKey("")); + assertFalse(AbstractScopedSettings.isValidKey("\"")); + + try { + new IndexScopedSettings( + Settings.EMPTY, 
Collections.singleton(Setting.groupSetting("boo .", false, Setting.Scope.INDEX))); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("illegal settings key: [boo .]", e.getMessage()); + } + new IndexScopedSettings( + Settings.EMPTY, Collections.singleton(Setting.groupSetting("boo.", false, Setting.Scope.INDEX))); + try { + new IndexScopedSettings( + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo.", true, false, Setting.Scope.INDEX))); + fail(); + } catch (IllegalArgumentException e) { + assertEquals("illegal settings key: [boo.]", e.getMessage()); + } + new IndexScopedSettings( + Settings.EMPTY, Collections.singleton(Setting.boolSetting("boo", true, false, Setting.Scope.INDEX))); + } + } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 71914444725..6f189bd5e19 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -45,7 +45,7 @@ public class SettingTests extends ESTestCase { ByteSizeValue byteSizeValue = byteSizeValueSetting.get(Settings.EMPTY); assertEquals(byteSizeValue.bytes(), 1024); AtomicReference value = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger); + ClusterSettings.SettingUpdater settingUpdater = byteSizeValueSetting.newUpdater(value::set, logger); try { settingUpdater.apply(Settings.builder().put("a.byte.size", 12).build(), Settings.EMPTY); fail("no unit"); @@ -60,7 +60,7 @@ public class SettingTests extends ESTestCase { public void testSimpleUpdate() { Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference atomicBoolean = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); + 
ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(atomicBoolean::set, logger); Settings build = Settings.builder().put("foo.bar", false).build(); settingUpdater.apply(build, Settings.EMPTY); assertNull(atomicBoolean.get()); @@ -94,8 +94,7 @@ public class SettingTests extends ESTestCase { Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.CLUSTER); AtomicReference ab1 = new AtomicReference<>(null); AtomicReference ab2 = new AtomicReference<>(null); - ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); - ClusterSettings.SettingUpdater settingUpdater2 = booleanSetting.newUpdater(ab2::set, logger); + ClusterSettings.SettingUpdater settingUpdater = booleanSetting.newUpdater(ab1::set, logger); settingUpdater.apply(Settings.builder().put("foo.bar", true).build(), Settings.EMPTY); assertTrue(ab1.get()); assertNull(ab2.get()); @@ -105,13 +104,17 @@ public class SettingTests extends ESTestCase { TimeValue defautlValue = TimeValue.timeValueMillis(randomIntBetween(0, 1000000)); Setting setting = Setting.positiveTimeSetting("my.time.value", defautlValue, randomBoolean(), Setting.Scope.CLUSTER); assertFalse(setting.isGroupSetting()); - String aDefault = setting.getDefault(Settings.EMPTY); + String aDefault = setting.getDefaultRaw(Settings.EMPTY); assertEquals(defautlValue.millis() + "ms", aDefault); assertEquals(defautlValue.millis(), setting.get(Settings.EMPTY).millis()); + assertEquals(defautlValue, setting.getDefault(Settings.EMPTY)); Setting secondaryDefault = new Setting<>("foo.bar", (s) -> s.get("old.foo.bar", "some_default"), (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); assertEquals("some_default", secondaryDefault.get(Settings.EMPTY)); assertEquals("42", secondaryDefault.get(Settings.builder().put("old.foo.bar", 42).build())); + Setting secondaryDefaultViaSettings = new Setting<>("foo.bar", secondaryDefault, (s) -> s, randomBoolean(), Setting.Scope.CLUSTER); + 
assertEquals("some_default", secondaryDefaultViaSettings.get(Settings.EMPTY)); + assertEquals("42", secondaryDefaultViaSettings.get(Settings.builder().put("old.foo.bar", 42).build())); } public void testComplexType() { @@ -120,7 +123,7 @@ public class SettingTests extends ESTestCase { assertFalse(setting.isGroupSetting()); ref.set(setting.get(Settings.EMPTY)); ComplexType type = ref.get(); - ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); assertFalse(settingUpdater.apply(Settings.EMPTY, Settings.EMPTY)); assertSame("no update - type has not changed", type, ref.get()); @@ -147,7 +150,7 @@ public class SettingTests extends ESTestCase { AtomicReference ref = new AtomicReference<>(null); Setting setting = Setting.groupSetting("foo.bar.", true, Setting.Scope.CLUSTER); assertTrue(setting.isGroupSetting()); - ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); + ClusterSettings.SettingUpdater settingUpdater = setting.newUpdater(ref::set, logger); Settings currentInput = Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").put("foo.bar.3.value", "3").build(); Settings previousInput = Settings.EMPTY; @@ -191,7 +194,7 @@ public class SettingTests extends ESTestCase { assertTrue(setting.match("foo.bar.baz")); assertFalse(setting.match("foo.baz.bar")); - ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger,(s) -> assertFalse(true)); + ClusterSettings.SettingUpdater predicateSettingUpdater = setting.newUpdater(ref::set, logger,(s) -> assertFalse(true)); try { predicateSettingUpdater.apply(Settings.builder().put("foo.bar.1.value", "1").put("foo.bar.2.value", "2").build(), Settings.EMPTY); fail("not accepted"); @@ -273,7 +276,7 @@ public class SettingTests extends ESTestCase { assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0])); 
AtomicReference> ref = new AtomicReference<>(); - AbstractScopedSettings.SettingUpdater settingUpdater = listSetting.newUpdater(ref::set, logger); + AbstractScopedSettings.SettingUpdater> settingUpdater = listSetting.newUpdater(ref::set, logger); assertTrue(settingUpdater.hasChanged(builder.build(), Settings.EMPTY)); settingUpdater.apply(builder.build(), Settings.EMPTY); assertEquals(input.size(), ref.get().size()); @@ -299,6 +302,26 @@ public class SettingTests extends ESTestCase { for (int i = 0; i < intValues.size(); i++) { assertEquals(i, intValues.get(i).intValue()); } + + Setting> settingWithFallback = Setting.listSetting("foo.baz", listSetting, s -> s, true, Setting.Scope.CLUSTER); + value = settingWithFallback.get(Settings.EMPTY); + assertEquals(1, value.size()); + assertEquals("foo,bar", value.get(0)); + + value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build()); + assertEquals(2, value.size()); + assertEquals("1", value.get(0)); + assertEquals("2", value.get(1)); + + value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build()); + assertEquals(2, value.size()); + assertEquals("3", value.get(0)); + assertEquals("4", value.get(1)); + + value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build()); + assertEquals(2, value.size()); + assertEquals("3", value.get(0)); + assertEquals("4", value.get(1)); } public void testListSettingAcceptsNumberSyntax() { diff --git a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java index 25c3a136271..5d7bbb3ca18 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java @@ -19,6 +19,10 @@ package org.elasticsearch.common.unit; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; + +import 
org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.closeTo; @@ -73,4 +77,21 @@ public class DistanceUnitTests extends ESTestCase { assertEquals(7, DistanceUnit.MILES.ordinal()); assertEquals(8, DistanceUnit.METERS.ordinal()); } + + public void testReadWrite() throws Exception { + for (DistanceUnit unit : DistanceUnit.values()) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + unit.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat("Roundtrip serialisation failed.", DistanceUnit.readDistanceUnit(in), equalTo(unit)); + } + } + } + } + + public void testFromString() { + for (DistanceUnit unit : DistanceUnit.values()) { + assertThat("Roundtrip string parsing failed.", DistanceUnit.fromString(unit.toString()), equalTo(unit)); + } + } } diff --git a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java deleted file mode 100644 index 25c765e6480..00000000000 --- a/core/src/test/java/org/elasticsearch/common/util/MultiDataPathUpgraderTests.java +++ /dev/null @@ -1,297 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.common.util; - -import org.apache.lucene.util.CollectionUtil; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestUtil; -import org.elasticsearch.bwcompat.OldIndexBackwardsCompatibilityIT; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.routing.AllocationId; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.gateway.MetaDataStateFormat; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardPath; -import org.elasticsearch.index.shard.ShardStateMetaData; -import org.elasticsearch.test.ESTestCase; - -import java.io.BufferedWriter; -import java.io.IOException; -import java.io.InputStream; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; -import java.nio.file.DirectoryStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -/** - */ -@LuceneTestCase.SuppressFileSystems("ExtrasFS") -public class MultiDataPathUpgraderTests extends ESTestCase { - - public void testUpgradeRandomPaths() throws IOException { - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - final String uuid = Strings.base64UUID(); - final ShardId shardId = new ShardId("foo", 0); - final Path[] shardDataPaths = nodeEnvironment.availableShardPaths(shardId); - if (nodeEnvironment.nodeDataPaths().length == 1) { - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - 
assertFalse(helper.needsUpgrading(shardId)); - return; - } - int numIdxFiles = 0; - int numTranslogFiles = 0; - int metaStateVersion = 0; - for (Path shardPath : shardDataPaths) { - final Path translog = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idx = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); - Files.createDirectories(translog); - Files.createDirectories(idx); - int numFiles = randomIntBetween(1, 10); - for (int i = 0; i < numFiles; i++, numIdxFiles++) { - String filename = Integer.toString(numIdxFiles); - try (BufferedWriter w = Files.newBufferedWriter(idx.resolve(filename + ".tst"), StandardCharsets.UTF_8)) { - w.write(filename); - } - } - numFiles = randomIntBetween(1, 10); - for (int i = 0; i < numFiles; i++, numTranslogFiles++) { - String filename = Integer.toString(numTranslogFiles); - try (BufferedWriter w = Files.newBufferedWriter(translog.resolve(filename + ".translog"), StandardCharsets.UTF_8)) { - w.write(filename); - } - } - ++metaStateVersion; - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(metaStateVersion, true, uuid, AllocationId.newInitializing()), metaStateVersion, shardDataPaths); - } - final Path path = randomFrom(shardDataPaths); - ShardPath targetPath = new ShardPath(false, path, path, uuid, new ShardId("foo", 0)); - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - helper.upgrade(shardId, targetPath); - assertFalse(helper.needsUpgrading(shardId)); - if (shardDataPaths.length > 1) { - for (Path shardPath : shardDataPaths) { - if (shardPath.equals(targetPath.getDataPath())) { - continue; - } - final Path translog = shardPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idx = shardPath.resolve(ShardPath.INDEX_FOLDER_NAME); - final Path state = shardPath.resolve(MetaDataStateFormat.STATE_DIR_NAME); - assertFalse(Files.exists(translog)); - assertFalse(Files.exists(idx)); - assertFalse(Files.exists(state)); - assertFalse(Files.exists(shardPath)); - } - } - - final 
ShardStateMetaData stateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, targetPath.getShardStatePath()); - assertEquals(metaStateVersion, stateMetaData.version); - assertTrue(stateMetaData.primary); - assertEquals(uuid, stateMetaData.indexUUID); - final Path translog = targetPath.getDataPath().resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idx = targetPath.getDataPath().resolve(ShardPath.INDEX_FOLDER_NAME); - Files.deleteIfExists(idx.resolve("write.lock")); - assertEquals(numTranslogFiles, FileSystemUtils.files(translog).length); - assertEquals(numIdxFiles, FileSystemUtils.files(idx).length); - final HashSet translogFiles = Sets.newHashSet(FileSystemUtils.files(translog)); - for (int i = 0; i < numTranslogFiles; i++) { - final String name = Integer.toString(i); - translogFiles.contains(translog.resolve(name + ".translog")); - byte[] content = Files.readAllBytes(translog.resolve(name + ".translog")); - assertEquals(name , new String(content, StandardCharsets.UTF_8)); - } - final HashSet idxFiles = Sets.newHashSet(FileSystemUtils.files(idx)); - for (int i = 0; i < numIdxFiles; i++) { - final String name = Integer.toString(i); - idxFiles.contains(idx.resolve(name + ".tst")); - byte[] content = Files.readAllBytes(idx.resolve(name + ".tst")); - assertEquals(name , new String(content, StandardCharsets.UTF_8)); - } - } - } - - /** - * Run upgrade on a real bwc index - */ - public void testUpgradeRealIndex() throws IOException, URISyntaxException { - List indexes = new ArrayList<>(); - try (DirectoryStream stream = Files.newDirectoryStream(getBwcIndicesPath(), "index-*.zip")) { - for (Path path : stream) { - indexes.add(path); - } - } - CollectionUtil.introSort(indexes, new Comparator() { - @Override - public int compare(Path o1, Path o2) { - return o1.getFileName().compareTo(o2.getFileName()); - } - }); - final ShardId shardId = new ShardId("test", 0); - final Path path = randomFrom(indexes); - final Path indexFile = path; - final String indexName = 
indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT); - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - if (nodeEnvironment.nodeDataPaths().length == 1) { - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - assertFalse(helper.needsUpgrading(shardId)); - return; - } - Path unzipDir = createTempDir(); - Path unzipDataDir = unzipDir.resolve("data"); - // decompress the index - try (InputStream stream = Files.newInputStream(indexFile)) { - TestUtil.unzip(stream, unzipDir); - } - // check it is unique - assertTrue(Files.exists(unzipDataDir)); - Path[] list = FileSystemUtils.files(unzipDataDir); - if (list.length != 1) { - throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length); - } - // the bwc scripts packs the indices under this path - Path src = list[0].resolve("nodes/0/indices/" + indexName); - assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src)); - Path[] multiDataPath = new Path[nodeEnvironment.nodeDataPaths().length]; - int i = 0; - for (NodeEnvironment.NodePath nodePath : nodeEnvironment.nodePaths()) { - multiDataPath[i++] = nodePath.indicesPath; - } - logger.info("--> injecting index [{}] into multiple data paths", indexName); - OldIndexBackwardsCompatibilityIT.copyIndex(logger, src, indexName, multiDataPath); - final ShardPath shardPath = new ShardPath(false, nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], IndexMetaData.INDEX_UUID_NA_VALUE, new ShardId(indexName, 0)); - - logger.info("{}", (Object)FileSystemUtils.files(shardPath.resolveIndex())); - - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - helper.upgrade(new ShardId(indexName, 0), shardPath); - helper.checkIndex(shardPath); - assertFalse(helper.needsUpgrading(new ShardId(indexName, 0))); - } - } - - public void testNeedsUpgrade() 
throws IOException { - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - String uuid = Strings.randomBase64UUID(); - final ShardId shardId = new ShardId("foo", 0); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(1, true, uuid, AllocationId.newInitializing()), 1, nodeEnvironment.availableShardPaths(shardId)); - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - boolean multiDataPaths = nodeEnvironment.nodeDataPaths().length > 1; - boolean needsUpgrading = helper.needsUpgrading(shardId); - if (multiDataPaths) { - assertTrue(needsUpgrading); - } else { - assertFalse(needsUpgrading); - } - } - } - - public void testPickTargetShardPath() throws IOException { - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - final ShardId shard = new ShardId("foo", 0); - final Path[] paths = nodeEnvironment.availableShardPaths(shard); - if (paths.length == 1) { - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment); - try { - helper.pickShardPath(new ShardId("foo", 0)); - fail("one path needs no upgrading"); - } catch (IllegalStateException ex) { - // only one path - } - } else { - final Map> pathToSpace = new HashMap<>(); - final Path expectedPath; - if (randomBoolean()) { // path with most of the file bytes - expectedPath = randomFrom(paths); - long[] used = new long[paths.length]; - long sumSpaceUsed = 0; - for (int i = 0; i < used.length; i++) { - long spaceUsed = paths[i] == expectedPath ? 
randomIntBetween(101, 200) : randomIntBetween(10, 100); - sumSpaceUsed += spaceUsed; - used[i] = spaceUsed; - } - for (int i = 0; i < used.length; i++) { - long availalbe = randomIntBetween((int)(2*sumSpaceUsed-used[i]), 4 * (int)sumSpaceUsed); - pathToSpace.put(paths[i], new Tuple<>(availalbe, used[i])); - } - } else { // path with largest available space - expectedPath = randomFrom(paths); - long[] used = new long[paths.length]; - long sumSpaceUsed = 0; - for (int i = 0; i < used.length; i++) { - long spaceUsed = randomIntBetween(10, 100); - sumSpaceUsed += spaceUsed; - used[i] = spaceUsed; - } - - for (int i = 0; i < used.length; i++) { - long availalbe = paths[i] == expectedPath ? randomIntBetween((int)(sumSpaceUsed), (int)(2*sumSpaceUsed)) : randomIntBetween(0, (int)(sumSpaceUsed) - 1) ; - pathToSpace.put(paths[i], new Tuple<>(availalbe, used[i])); - } - - } - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment) { - @Override - protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException { - return pathToSpace.get(path.resolve(shard)).v1(); - } - - @Override - protected long getSpaceUsedByShard(Path path) throws IOException { - return pathToSpace.get(path).v2(); - } - }; - String uuid = Strings.randomBase64UUID(); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(1, true, uuid, AllocationId.newInitializing()), 1, paths); - final ShardPath shardPath = helper.pickShardPath(new ShardId("foo", 0)); - assertEquals(expectedPath, shardPath.getDataPath()); - assertEquals(expectedPath, shardPath.getShardStatePath()); - } - - MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment) { - @Override - protected long getUsabelSpace(NodeEnvironment.NodePath path) throws IOException { - return randomIntBetween(0, 10); - } - - @Override - protected long getSpaceUsedByShard(Path path) throws IOException { - return randomIntBetween(11, 20); - } - }; - - try { - helper.pickShardPath(new ShardId("foo", 0)); - fail("not 
enough space"); - } catch (IllegalStateException ex) { - // not enough space - } - } - } -} diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java new file mode 100644 index 00000000000..b0bbf0f47f2 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedRunnableTests.java @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.util.concurrent; + +import org.elasticsearch.common.Priority; +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; + +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public class PrioritizedRunnableTests extends ESTestCase { + + // test unit conversion with a controlled clock + public void testGetAgeInMillis() throws Exception { + AtomicLong time = new AtomicLong(); + + PrioritizedRunnable runnable = new PrioritizedRunnable(Priority.NORMAL, time::get) { + @Override + public void run() { + } + }; + assertEquals(0, runnable.getAgeInMillis()); + int milliseconds = randomIntBetween(1, 256); + time.addAndGet(TimeUnit.NANOSECONDS.convert(milliseconds, TimeUnit.MILLISECONDS)); + assertEquals(milliseconds, runnable.getAgeInMillis()); + } + + // test age advances with System#nanoTime + public void testGetAgeInMillisWithRealClock() throws InterruptedException { + long nanosecondsInMillisecond = TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS); + PrioritizedRunnable runnable = new PrioritizedRunnable(Priority.NORMAL) { + @Override + public void run() { + } + }; + + // force at least one millisecond to elapse, but ensure the + // clock has enough resolution to observe the passage of time + long start = System.nanoTime(); + long elapsed; + while ((elapsed = (System.nanoTime() - start)) < nanosecondsInMillisecond) { + // busy spin + } + + // creation happened before start, so age will be at least as + // large as elapsed + assertThat(runnable.getAgeInMillis(), greaterThanOrEqualTo(TimeUnit.MILLISECONDS.convert(elapsed, TimeUnit.NANOSECONDS))); + } + +} diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 2a1b146da92..2b3f918d545 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ 
b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -46,7 +46,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testRegisterMasterElectionService() { Settings settings = Settings.builder().put("node.local", false). - put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_KEY, "custom").build(); + put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); assertBinding(module, ElectMasterService.class, DummyMasterElectionService.class); @@ -55,7 +55,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testLoadUnregisteredMasterElectionService() { Settings settings = Settings.builder().put("node.local", false). - put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_KEY, "foobar").build(); + put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "foobar").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); assertBindingFailure(module, "Unknown master service type [foobar]"); @@ -71,7 +71,7 @@ public class DiscoveryModuleTests extends ModuleTestCase { public void testRegisterDiscovery() { boolean local = randomBoolean(); Settings settings = Settings.builder().put("node.local", local). 
- put(DiscoveryModule.DISCOVERY_TYPE_KEY, "custom").build(); + put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addDiscoveryType("custom", DummyDisco.class); assertBinding(module, Discovery.class, DummyDisco.class); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 67d3df42b38..f77bea8a27d 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.discovery; +import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.get.GetResponse; @@ -30,17 +31,21 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.Murmur3HashFunction; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; +import 
org.elasticsearch.common.math.MathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.ZenDiscovery; @@ -96,6 +101,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -158,8 +164,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } final static Settings DEFAULT_SETTINGS = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // for hitting simulated network failures quickly - .put(FaultDetection.SETTING_PING_RETRIES, "1") // for hitting simulated network failures quickly + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("http.enabled", false) // just to make test quicker @@ -460,7 +466,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("[{}] Acquired semaphore and it has {} permits left", name, semaphore.availablePermits()); try { id = Integer.toString(idGenerator.incrementAndGet()); - int shard = Murmur3HashFunction.hash(id) % numPrimaries; + int shard = MathUtils.mod(Murmur3HashFunction.hash(id), numPrimaries); logger.trace("[{}] indexing id [{}] through node [{}] targeting shard [{}]", name, id, node, shard); IndexResponse response = client.prepareIndex("test", 
"type", id).setSource("{}").setTimeout("1s").get(); assertThat(response.getVersion(), equalTo(1l)); @@ -883,6 +889,71 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { internalCluster().stopRandomNonMasterNode(); } + // simulate handling of sending shard failure during an isolation + public void testSendingShardFailure() throws Exception { + List nodes = startCluster(3, 2); + String masterNode = internalCluster().getMasterName(); + List nonMasterNodes = nodes.stream().filter(node -> !node.equals(masterNode)).collect(Collectors.toList()); + String nonMasterNode = randomFrom(nonMasterNodes); + assertAcked(prepareCreate("test") + .setSettings(Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) + )); + ensureGreen(); + String nonMasterNodeId = internalCluster().clusterService(nonMasterNode).localNode().getId(); + + // fail a random shard + ShardRouting failedShard = + randomFrom(clusterService().state().getRoutingNodes().node(nonMasterNodeId).shardsWithState(ShardRoutingState.STARTED)); + ShardStateAction service = internalCluster().getInstance(ShardStateAction.class, nonMasterNode); + String indexUUID = clusterService().state().metaData().index("test").getIndexUUID(); + CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean success = new AtomicBoolean(); + + String isolatedNode = randomBoolean() ? 
masterNode : nonMasterNode; + NetworkPartition networkPartition = addRandomIsolation(isolatedNode); + networkPartition.startDisrupting(); + + service.shardFailed(failedShard, indexUUID, "simulated", new CorruptIndexException("simulated", (String) null), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + success.set(true); + latch.countDown(); + } + + @Override + public void onFailure(Throwable t) { + success.set(false); + latch.countDown(); + assert false; + } + }); + + if (isolatedNode.equals(nonMasterNode)) { + assertNoMaster(nonMasterNode); + } else { + ensureStableCluster(2, nonMasterNode); + } + + // heal the partition + networkPartition.removeAndEnsureHealthy(internalCluster()); + + // the cluster should stabilize + ensureStableCluster(3); + + latch.await(); + + // the listener should be notified + assertTrue(success.get()); + + // the failed shard should be gone + List shards = clusterService().state().getRoutingTable().allShards("test"); + for (ShardRouting shard : shards) { + assertThat(shard.allocationId(), not(equalTo(failedShard.allocationId()))); + } + } + public void testClusterFormingWithASlowNode() throws Exception { configureUnicastCluster(3, null, 2); @@ -891,7 +962,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { // don't wait for initial state, wat want to add the disruption while the cluster is forming.. 
internalCluster().startNodesAsync(3, Settings.builder() - .put(DiscoveryService.SETTING_INITIAL_STATE_TIMEOUT, "1ms") + .put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), "1ms") .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "3s") .build()).get(); diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index e7a10b0f62b..e3279d2839c 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -131,8 +131,8 @@ public class ZenFaultDetectionTests extends ESTestCase { Settings.Builder settings = Settings.builder(); boolean shouldRetry = randomBoolean(); // make sure we don't ping again after the initial ping - settings.put(FaultDetection.SETTING_CONNECT_ON_NETWORK_DISCONNECT, shouldRetry) - .put(FaultDetection.SETTING_PING_INTERVAL, "5m"); + settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry) + .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m"); ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(buildNodesForA(true)).build(); NodesFaultDetection nodesFDA = new NodesFaultDetection(settings.build(), threadPool, serviceA, clusterState.getClusterName()); nodesFDA.setLocalNode(nodeA); @@ -179,8 +179,8 @@ public class ZenFaultDetectionTests extends ESTestCase { Settings.Builder settings = Settings.builder(); boolean shouldRetry = randomBoolean(); // make sure we don't ping - settings.put(FaultDetection.SETTING_CONNECT_ON_NETWORK_DISCONNECT, shouldRetry) - .put(FaultDetection.SETTING_PING_INTERVAL, "5m"); + settings.put(FaultDetection.CONNECT_ON_NETWORK_DISCONNECT_SETTING.getKey(), shouldRetry) + .put(FaultDetection.PING_INTERVAL_SETTING.getKey(), "5m"); ClusterName clusterName = new ClusterName(randomAsciiOfLengthBetween(3, 20)); final ClusterState state = 
ClusterState.builder(clusterName).nodes(buildNodesForA(false)).build(); MasterFaultDetection masterFD = new MasterFaultDetection(settings.build(), threadPool, serviceA, clusterName, diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index 9f9c0420c2f..eb17ab29d33 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -95,8 +95,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { public void testNoShardRelocationsOccurWhenElectedMasterNodeFails() throws Exception { Settings defaultSettings = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") - .put(FaultDetection.SETTING_PING_RETRIES, "1") + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") .put("discovery.type", "zen") .build(); @@ -142,8 +142,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { @TestLogging(value = "action.admin.cluster.health:TRACE") public void testNodeFailuresAreProcessedOnce() throws ExecutionException, InterruptedException, IOException { Settings defaultSettings = Settings.builder() - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") - .put(FaultDetection.SETTING_PING_RETRIES, "1") + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") .put("discovery.type", "zen") .build(); diff --git a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 79f9efbb814..0a62d2829d3 100644 --- a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -40,8 +40,8 @@ public class EnvironmentTests extends ESTestCase { public Environment newEnvironment(Settings settings) throws IOException { 
Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath()) - .putArray("path.data", tmpPaths()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new Environment(build); } @@ -49,7 +49,7 @@ public class EnvironmentTests extends ESTestCase { Environment environment = newEnvironment(); assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); - environment = newEnvironment(settingsBuilder().putArray("path.repo", "/test/repos", "/another/repos", "/test/repos/../other").build()); + environment = newEnvironment(settingsBuilder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index acee455bb6c..1ead12ff432 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -50,7 +50,7 @@ public class NodeEnvironmentTests extends ESTestCase { NodeEnvironment env = newNodeEnvironment(Settings.builder() .put("node.max_local_storage_nodes", 1).build()); Settings settings = env.getSettings(); - String[] dataPaths = env.getSettings().getAsArray("path.data"); + List dataPaths = Environment.PATH_DATA_SETTING.get(env.getSettings()); try { new NodeEnvironment(settings, new Environment(settings)); @@ -62,10 +62,10 @@ public class NodeEnvironmentTests extends ESTestCase { // now can recreate and 
lock it env = new NodeEnvironment(settings, new Environment(settings)); - assertEquals(env.nodeDataPaths().length, dataPaths.length); + assertEquals(env.nodeDataPaths().length, dataPaths.size()); - for (int i = 0; i < dataPaths.length; i++) { - assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths[i]))); + for (int i = 0; i < dataPaths.size(); i++) { + assertTrue(env.nodeDataPaths()[i].startsWith(PathUtils.get(dataPaths.get(i)))); } env.close(); assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); @@ -74,11 +74,11 @@ public class NodeEnvironmentTests extends ESTestCase { public void testNodeLockMultipleEnvironment() throws IOException { final NodeEnvironment first = newNodeEnvironment(); - String[] dataPaths = first.getSettings().getAsArray("path.data"); + List dataPaths = Environment.PATH_DATA_SETTING.get(first.getSettings()); NodeEnvironment second = new NodeEnvironment(first.getSettings(), new Environment(first.getSettings())); - assertEquals(first.nodeDataPaths().length, dataPaths.length); - assertEquals(second.nodeDataPaths().length, dataPaths.length); - for (int i = 0; i < dataPaths.length; i++) { + assertEquals(first.nodeDataPaths().length, dataPaths.size()); + assertEquals(second.nodeDataPaths().length, dataPaths.size()); + for (int i = 0; i < dataPaths.size(); i++) { assertEquals(first.nodeDataPaths()[i].getParent(), second.nodeDataPaths()[i].getParent()); } IOUtils.close(first, second); @@ -355,25 +355,25 @@ public class NodeEnvironmentTests extends ESTestCase { public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath().toString()) - .putArray("path.data", tmpPaths()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new NodeEnvironment(build, new 
Environment(build)); } public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath().toString()) - .putArray("path.data", dataPaths).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); return new NodeEnvironment(build, new Environment(build)); } public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataPath, Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath().toString()) - .put("path.shared_data", sharedDataPath) - .putArray("path.data", dataPaths).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath) + .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); return new NodeEnvironment(build, new Environment(build)); } } diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index 1c3ec79dd94..d2f7bb888cd 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -64,7 +64,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { String node2 = nodeName2.get(); String index = "index"; - assertAcked(prepareCreate(index).setSettings(Settings.builder().put("index.number_of_replicas", 0).put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "_name", node1))); + assertAcked(prepareCreate(index).setSettings(Settings.builder().put("index.number_of_replicas", 0).put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_name", 
node1))); index(index, "doc", "1", jsonBuilder().startObject().field("text", "some text").endObject()); ensureGreen(); assertIndexInMetaState(node1, index); @@ -72,7 +72,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { assertIndexInMetaState(masterNode, index); logger.debug("relocating index..."); - client().admin().indices().prepareUpdateSettings(index).setSettings(Settings.builder().put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "_name", node2)).get(); + client().admin().indices().prepareUpdateSettings(index).setSettings(Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_name", node2)).get(); client().admin().cluster().prepareHealth().setWaitForRelocatingShards(0).get(); ensureGreen(); assertIndexDirectoryDeleted(node1, index); diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 44c1fae6492..e5362aa84fc 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -35,15 +35,15 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import static org.hamcrest.Matchers.anyOf; @@ -104,7 +104,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } else { 
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0); } - testAllocator.addData(node1, -1, null); + testAllocator.addData(node1, -1, null, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -116,7 +116,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testNoMatchingAllocationIdFound() { RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "id2"); - testAllocator.addData(node1, 1, "id1"); + testAllocator.addData(node1, 1, "id1", randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); @@ -129,7 +129,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testNoActiveAllocationIds() { RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1); - testAllocator.addData(node1, 1, null); + testAllocator.addData(node1, 1, null, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -144,10 +144,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { final RoutingAllocation allocation; if (randomBoolean()) { allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); - testAllocator.addData(node1, 1, "allocId1", new CorruptIndexException("test", "test")); + testAllocator.addData(node1, 1, "allocId1", randomBoolean(), new CorruptIndexException("test", "test")); } else { 
allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_1); - testAllocator.addData(node1, 3, null, new CorruptIndexException("test", "test")); + testAllocator.addData(node1, 3, null, randomBoolean(), new CorruptIndexException("test", "test")); } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -162,10 +162,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { final RoutingAllocation allocation; if (randomBoolean()) { allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); - testAllocator.addData(node1, 1, "allocId1"); + testAllocator.addData(node1, 1, "allocId1", randomBoolean()); } else { allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_2_0); - testAllocator.addData(node1, 3, null); + testAllocator.addData(node1, 3, null, randomBoolean()); } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -174,6 +174,24 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node1.id())); } + /** + * Tests that when there was a node that previously had the primary, it will be allocated to that same node again. + */ + public void testPreferAllocatingPreviousPrimary() { + String primaryAllocId = Strings.randomBase64UUID(); + String replicaAllocId = Strings.randomBase64UUID(); + RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), primaryAllocId, replicaAllocId); + boolean node1HasPrimaryShard = randomBoolean(); + testAllocator.addData(node1, 1, node1HasPrimaryShard ? 
primaryAllocId : replicaAllocId, node1HasPrimaryShard); + testAllocator.addData(node2, 1, node1HasPrimaryShard ? replicaAllocId : primaryAllocId, !node1HasPrimaryShard); + boolean changed = testAllocator.allocateUnassigned(allocation); + assertThat(changed, equalTo(true)); + assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + DiscoveryNode allocatedNode = node1HasPrimaryShard ? node1 : node2; + assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(allocatedNode.id())); + } + /** * Tests that when there is a node to allocate to, but it is throttling (and it is the only one), * it will be moved to ignore unassigned until it can be allocated to. @@ -182,10 +200,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { final RoutingAllocation allocation; if (randomBoolean()) { allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); - testAllocator.addData(node1, 1, "allocId1"); + testAllocator.addData(node1, 1, "allocId1", randomBoolean()); } else { allocation = routingAllocationWithOnePrimaryNoReplicas(throttleAllocationDeciders(), false, Version.V_2_2_0); - testAllocator.addData(node1, 3, null); + testAllocator.addData(node1, 3, null, randomBoolean()); } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -201,10 +219,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { final RoutingAllocation allocation; if (randomBoolean()) { allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, randomFrom(Version.V_2_0_0, Version.CURRENT), "allocId1"); - testAllocator.addData(node1, 1, "allocId1"); + testAllocator.addData(node1, 1, "allocId1", 
randomBoolean()); } else { allocation = routingAllocationWithOnePrimaryNoReplicas(noAllocationDeciders(), false, Version.V_2_0_0); - testAllocator.addData(node1, 3, null); + testAllocator.addData(node1, 3, null, randomBoolean()); } boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -218,7 +236,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testAllocateToTheHighestVersionOnLegacyIndex() { RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_0_0); - testAllocator.addData(node1, 10, null).addData(node2, 12, null); + testAllocator.addData(node1, 10, null, randomBoolean()).addData(node2, 12, null, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -232,7 +250,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRestore() { RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders()); - testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -245,7 +263,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRestoreThrottle() { RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders()); - testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); 
assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); @@ -257,7 +275,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRestoreForcesAllocateIfShardAvailable() { RoutingAllocation allocation = getRestoreRoutingAllocation(noAllocationDeciders()); - testAllocator.addData(node1, 1, randomFrom(null, "some allocId")); + testAllocator.addData(node1, 1, randomFrom(null, "some allocId"), randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -270,7 +288,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRestoreDoesNotAssignIfNoShardAvailable() { RoutingAllocation allocation = getRestoreRoutingAllocation(yesAllocationDeciders()); - testAllocator.addData(node1, -1, null); + testAllocator.addData(node1, -1, null, false); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -281,7 +299,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { Version version = randomFrom(Version.CURRENT, Version.V_2_0_0); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)).numberOfShards(1).numberOfReplicas(0) - .putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet())) + .putActiveAllocationIds(0, version == Version.CURRENT ? 
Sets.newHashSet("allocId") : Collections.emptySet())) .build(); RoutingTable routingTable = RoutingTable.builder() @@ -300,7 +318,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRecoverOnAnyNode() { RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders()); - testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -313,7 +331,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRecoverOnAnyNodeThrottle() { RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders()); - testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); @@ -325,7 +343,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRecoverOnAnyNodeForcesAllocateIfShardAvailable() { RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(noAllocationDeciders()); - testAllocator.addData(node1, 1, randomFrom(null, "allocId")); + testAllocator.addData(node1, 1, randomFrom(null, "allocId"), randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -338,7 +356,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { */ public void testRecoverOnAnyNodeDoesNotAssignIfNoShardAvailable() { 
RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(yesAllocationDeciders()); - testAllocator.addData(node1, -1, null); + testAllocator.addData(node1, -1, null, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); @@ -351,7 +369,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version) .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) .put(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, true)) - .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? new HashSet<>(Arrays.asList("allocId")) : Collections.emptySet())) + .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, version == Version.CURRENT ? Sets.newHashSet("allocId") : Collections.emptySet())) .build(); RoutingTable routingTable = RoutingTable.builder() @@ -387,7 +405,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node1, 1, null); + testAllocator.addData(node1, 1, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -395,7 +413,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - 
testAllocator.addData(node2, 1, null); + testAllocator.addData(node2, 1, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -428,7 +446,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node1, 1, null); + testAllocator.addData(node1, 1, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(false)); @@ -436,7 +454,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas - testAllocator.addData(node2, 2, null); + testAllocator.addData(node2, 2, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state.nodes(), null, System.nanoTime()); changed = testAllocator.allocateUnassigned(allocation); assertThat(changed, equalTo(true)); @@ -449,7 +467,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders, boolean asNew, Version version, String... 
activeAllocationIds) { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(version)) - .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, new HashSet<>(Arrays.asList(activeAllocationIds)))) + .numberOfShards(1).numberOfReplicas(0).putActiveAllocationIds(0, Sets.newHashSet(activeAllocationIds))) .build(); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(); if (asNew) { @@ -477,15 +495,15 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { return this; } - public TestAllocator addData(DiscoveryNode node, long version, String allocationId) { - return addData(node, version, allocationId, null); + public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary) { + return addData(node, version, allocationId, primary, null); } - public TestAllocator addData(DiscoveryNode node, long version, String allocationId, @Nullable Throwable storeException) { + public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary, @Nullable Throwable storeException) { if (data == null) { data = new HashMap<>(); } - data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, allocationId, storeException)); + data.put(node, new TransportNodesListGatewayStartedShards.NodeGatewayStartedShards(node, version, allocationId, primary, storeException)); return this; } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java index dbdf747de63..87a10625c5b 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryBackwardsCompatibilityIT.java @@ -60,7 +60,7 @@ public class RecoveryBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testReusePeerRecovery() 
throws Exception { assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE))); logger.info("--> indexing docs"); int numDocs = scaledRandomIntBetween(100, 1000); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index f0650a1cbda..e2cb4bc7925 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -28,9 +28,11 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDeci import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -39,6 +41,8 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.store.MockFSIndexStore; +import java.util.Collection; + import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import 
static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -54,6 +58,12 @@ import static org.hamcrest.Matchers.notNullValue; @ClusterScope(numDataNodes = 0, scope = Scope.TEST) public class RecoveryFromGatewayIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(MockFSIndexStore.TestPlugin.class); + } + public void testOneNodeRecoverFromGateway() throws Exception { internalCluster().startNode(); @@ -322,11 +332,11 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { public void testReusePeerRecovery() throws Exception { final Settings settings = settingsBuilder() .put("action.admin.cluster.node.shutdown.delay", "10ms") - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) .put("gateway.recover_after_nodes", 4) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING, 4) - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING, 4) - .put(MockFSDirectoryService.CRASH_INDEX, false).build(); + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 4) + .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 4) + .put(MockFSDirectoryService.CRASH_INDEX_SETTING.getKey(), false).build(); internalCluster().startNodesAsync(4, settings).get(); // prevent any rebalance actions during the peer recovery @@ -334,7 +344,7 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { // we reuse the files on disk after full restarts for replicas. 
assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(indexSettings()) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE))); ensureGreen(); logger.info("--> indexing docs"); for (int i = 0; i < 1000; i++) { @@ -429,11 +439,11 @@ public class RecoveryFromGatewayIT extends ESIntegTestCase { public void testRecoveryDifferentNodeOrderStartup() throws Exception { // we need different data paths so we make sure we start the second node fresh - final String node_1 = internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build()); + final String node_1 = internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build()); client().prepareIndex("test", "type1", "1").setSource("field", "value").execute().actionGet(); - internalCluster().startNode(settingsBuilder().put("path.data", createTempDir()).build()); + internalCluster().startNode(settingsBuilder().put(Environment.PATH_DATA_SETTING.getKey(), createTempDir()).build()); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index 0a6ddca5d24..17a3da6421f 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; import 
org.elasticsearch.index.store.Store; @@ -51,11 +52,9 @@ import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; -import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -228,7 +227,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { public void testDelayedAllocation() { RoutingAllocation allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); testAllocator.addData(node1, true, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); if (randomBoolean()) { // we sometime return empty list of files, make sure we test this as well @@ -241,7 +240,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); allocation = onePrimaryOnNode1And1Replica(yesAllocationDeciders(), - Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); + Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueHours(1)).build(), UnassignedInfo.Reason.NODE_LEFT); testAllocator.addData(node2, false, "MATCH", new StoreFileMetaData("file1", 10, "MATCH_CHECKSUM")); AllocationService.updateLeftDelayOfUnassignedShards(allocation, Settings.EMPTY); changed = testAllocator.allocateUnassigned(allocation); @@ -289,7 +288,7 @@ public class 
ReplicaShardAllocatorTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT).put(settings)) .numberOfShards(1).numberOfReplicas(1) - .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId())))) + .putActiveAllocationIds(0, Sets.newHashSet(primaryShard.allocationId().getId()))) .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shardId.getIndex()) @@ -311,7 +310,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(shardId.getIndex()).settings(settings(Version.CURRENT)) .numberOfShards(1).numberOfReplicas(1) - .putActiveAllocationIds(0, new HashSet<>(Arrays.asList(primaryShard.allocationId().getId())))) + .putActiveAllocationIds(0, Sets.newHashSet(primaryShard.allocationId().getId()))) .build(); RoutingTable routingTable = RoutingTable.builder() .add(IndexRoutingTable.builder(shardId.getIndex()) diff --git a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java index aca3906d185..e9d6154f71a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReusePeerRecoverySharedTest.java @@ -65,7 +65,7 @@ public class ReusePeerRecoverySharedTest { * for replicas. 
*/ assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put(indexSettings) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE))); + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE))); client().admin().cluster().prepareHealth().setWaitForGreenStatus().setTimeout("30s").get(); logger.info("--> indexing docs"); for (int i = 0; i < 1000; i++) { diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index cce4c0d22c4..6cc6def4931 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -40,10 +40,13 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Map; @@ -62,6 +65,12 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; public class GetActionIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + public void testSimpleGet() { assertAcked(prepareCreate("test") .setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1)) diff --git a/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java 
b/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java index 4d73b52576a..f227a9a03b4 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/HttpPublishPortIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.http.netty; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -39,7 +40,7 @@ public class HttpPublishPortIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .put("http.publish_port", 9080) .build(); } diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java index cb111a71988..f02916c68ac 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java @@ -81,7 +81,7 @@ public class NettyHttpChannelTests extends ESTestCase { public void testCorsEnabledWithoutAllowOrigins() { // Set up a HTTP transport with only the CORS enabled setting Settings settings = Settings.builder() - .put(NettyHttpServerTransport.SETTING_CORS_ENABLED, true) + .put(NettyHttpServerTransport.SETTING_CORS_ENABLED.getKey(), true) .build(); httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays); HttpRequest httpRequest = new TestHttpRequest(); @@ -104,7 +104,7 @@ public class NettyHttpChannelTests extends ESTestCase { public void 
testCorsEnabledWithAllowOrigins() { // create a http transport with CORS enabled and allow origin configured Settings settings = Settings.builder() - .put(NettyHttpServerTransport.SETTING_CORS_ENABLED, true) + .put(NettyHttpServerTransport.SETTING_CORS_ENABLED.getKey(), true) .put(NettyHttpServerTransport.SETTING_CORS_ALLOW_ORIGIN, "remote-host") .build(); httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java index f4ce3756e61..8f7765dcc87 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.http.netty; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.http.HttpServerTransport; @@ -45,7 +46,7 @@ import static org.hamcrest.Matchers.hasSize; public class NettyPipeliningDisabledIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Node.HTTP_ENABLED, true).put("http.pipelining", false).build(); + return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).put("http.pipelining", false).build(); } public void testThatNettyHttpServerDoesNotSupportPipelining() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java index 9e5971c1d4f..93f54cb7628 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java +++ 
b/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.http.netty; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.http.HttpServerTransport; @@ -42,7 +43,7 @@ import static org.hamcrest.Matchers.is; public class NettyPipeliningEnabledIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Node.HTTP_ENABLED, true).put("http.pipelining", true).build(); + return settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).put("http.pipelining", true).build(); } public void testThatNettyHttpServerSupportsPipelining() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index b83af9c0960..26656296498 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -18,6 +18,15 @@ */ package org.elasticsearch.index; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; @@ -33,7 +42,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -51,7 +60,6 @@ import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreConfig; -import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesWarmer; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -70,14 +78,7 @@ import org.elasticsearch.test.engine.MockEngineFactory; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; +import static java.util.Collections.emptyMap; public class IndexModuleTests extends ESTestCase { private Index index; @@ -108,7 +109,7 @@ public class IndexModuleTests extends ESTestCase { Set scriptEngines = new HashSet<>(); scriptEngines.addAll(Arrays.asList(scriptEngineServices)); ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), new ScriptContextRegistry(Collections.emptyList())); - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, Collections.emptySet(), new NamedWriteableRegistry()); + IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(settings, emptyMap()); return new NodeServicesProvider(threadPool, indicesQueryCache, null, warmer, bigArrays, client, scriptService, indicesQueriesRegistry, indicesFieldDataCache, circuitBreakerService); } @@ 
-116,7 +117,7 @@ public class IndexModuleTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); index = new Index("foo"); - settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).build(); + settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); indexSettings = IndexSettingsModule.newIndexSettings(index, settings); environment = new Environment(settings); nodeServicesProvider = newNodeServiceProvider(settings, environment, null); @@ -147,7 +148,12 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterIndexStore() throws IOException { final Index index = new Index("foo"); - final Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("path.home", createTempDir().toString()).put(IndexModule.STORE_TYPE, "foo_store").build(); + final Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "foo_store") + .build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); module.addIndexStore("foo_store", FooStore::new); @@ -173,13 +179,11 @@ public class IndexModuleTests extends ESTestCase { IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); Consumer listener = (s) -> {}; - module.addIndexSettingsListener(listener); module.addIndexEventListener(eventListener); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, 
nodeServicesProvider, mapperRegistry); IndexSettings x = indexService.getIndexSettings(); assertEquals(x.getSettings().getAsMap(), indexSettings.getSettings().getAsMap()); assertEquals(x.getIndex(), index); - assertSame(x.getUpdateListeners().get(0), listener); indexService.getIndexEventListener().beforeIndexDeleted(null); assertTrue(atomicBoolean.get()); indexService.close("simon says", false); @@ -187,28 +191,22 @@ public class IndexModuleTests extends ESTestCase { public void testListener() throws IOException { - IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(null, environment)); - Consumer listener = (s) -> { - }; - module.addIndexSettingsListener(listener); + Setting booleanSetting = Setting.boolSetting("foo.bar", false, true, Setting.Scope.INDEX); + IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(index, settings, booleanSetting), null, new AnalysisRegistry(null, environment)); + Setting booleanSetting2 = Setting.boolSetting("foo.bar.baz", false, true, Setting.Scope.INDEX); + AtomicBoolean atomicBoolean = new AtomicBoolean(false); + module.addSettingsUpdateConsumer(booleanSetting, atomicBoolean::set); try { - module.addIndexSettingsListener(listener); - fail("already added"); - } catch (IllegalStateException ex) { - - } - - try { - module.addIndexSettingsListener(null); - fail("must not be null"); + module.addSettingsUpdateConsumer(booleanSetting2, atomicBoolean::set); + fail("not registered"); } catch (IllegalArgumentException ex) { } + IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); - IndexSettings x = indexService.getIndexSettings(); - assertEquals(1, x.getUpdateListeners().size()); - assertSame(x.getUpdateListeners().get(0), listener); + assertSame(booleanSetting, indexService.getIndexSettings().getScopedSettings().get(booleanSetting.getKey())); + indexService.close("simon says", false); } @@ -217,7 +215,7 @@ public class 
IndexModuleTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.similarity.my_similarity.type", "test_similarity") .put("index.similarity.my_similarity.key", "there is a key") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); module.addSimilarity("test_similarity", (string, settings) -> new SimilarityProvider() { @@ -245,7 +243,7 @@ public class IndexModuleTests extends ESTestCase { Settings indexSettings = Settings.settingsBuilder() .put("index.similarity.my_similarity.type", "test_similarity") .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); try { @@ -258,7 +256,7 @@ public class IndexModuleTests extends ESTestCase { public void testSetupWithoutType() throws IOException { Settings indexSettings = Settings.settingsBuilder() .put("index.similarity.my_similarity.foo", "bar") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); @@ -271,7 +269,7 @@ public class IndexModuleTests extends ESTestCase { public void testCannotRegisterProvidedImplementations() { Settings indexSettings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); try { @@ -298,8 +296,8 @@ public class IndexModuleTests extends ESTestCase { public void testRegisterCustomQueryCache() throws IOException { Settings indexSettings = Settings.settingsBuilder() - .put(IndexModule.QUERY_CACHE_TYPE, "custom") - .put("path.home", createTempDir().toString()) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), "custom") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); module.registerQueryCache("custom", (a, b) -> new CustomQueryCache()); @@ -317,7 +315,7 @@ public class IndexModuleTests extends ESTestCase { public void testDefaultQueryCacheImplIsSelected() throws IOException { Settings indexSettings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexModule module = new IndexModule(IndexSettingsModule.newIndexSettings(new Index("foo"), indexSettings), null, new AnalysisRegistry(null, environment)); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, mapperRegistry); @@ -374,8 +372,6 @@ public class IndexModuleTests extends ESTestCase { } } - - public static final class FooStore extends IndexStore { public FooStore(IndexSettings indexSettings, IndexStoreConfig config) { diff --git a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java 
index 4c892849ccd..15f9a3e78b3 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -68,12 +68,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertTrue("shadow replicas for replica shards with shadow settings",IndexService.useShadowEngine(false, shadowSettings)); } - public IndexService newIndexService() { - Settings settings = Settings.builder().put("name", "indexServiceTests").build(); - return createIndex("test", settings); - } - - public static CompressedXContent filter(QueryBuilder filterBuilder) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -82,7 +76,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testFilteringAliases() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "dogs", filter(termQuery("animal", "dog"))); @@ -106,7 +100,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testAliasFilters() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); @@ -123,7 +117,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testRemovedAliasFilter() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); @@ -137,7 +131,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } 
public void testUnknownAliasFilter() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); @@ -162,7 +156,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testBaseAsyncTask() throws InterruptedException, IOException { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); AtomicReference latch = new AtomicReference<>(new CountDownLatch(1)); AtomicReference latch2 = new AtomicReference<>(new CountDownLatch(1)); final AtomicInteger count = new AtomicInteger(); @@ -221,13 +215,13 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testRefreshTaskIsUpdated() throws IOException { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexService.AsyncRefreshTask refreshTask = indexService.getRefreshTask(); assertEquals(1000, refreshTask.getInterval().millis()); assertTrue(indexService.getRefreshTask().mustReschedule()); // now disable - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, -1)).build(); + IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).build(); indexService.updateMetaData(metaData); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -235,7 +229,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertFalse(indexService.getRefreshTask().mustReschedule()); // set it to 100ms - metaData = 
IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "100ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "100ms")).build(); indexService.updateMetaData(metaData); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -246,7 +240,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(100, refreshTask.getInterval().millis()); // set it to 200ms - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "200ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).build(); indexService.updateMetaData(metaData); assertNotSame(refreshTask, indexService.getRefreshTask()); assertTrue(refreshTask.isClosed()); @@ -257,7 +251,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(200, refreshTask.getInterval().millis()); // set it to 200ms again - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "200ms")).build(); + metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "200ms")).build(); indexService.updateMetaData(metaData); assertSame(refreshTask, indexService.getRefreshTask()); assertTrue(indexService.getRefreshTask().mustReschedule()); @@ -270,7 +264,7 @@ public class 
IndexServiceTests extends ESSingleNodeTestCase { } public void testFsyncTaskIsRunning() throws IOException { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); IndexService.AsyncTranslogFSync fsyncTask = indexService.getFsyncTask(); assertNotNull(fsyncTask); assertEquals(5000, fsyncTask.getInterval().millis()); @@ -283,14 +277,14 @@ public class IndexServiceTests extends ESSingleNodeTestCase { } public void testRefreshActuallyWorks() throws Exception { - IndexService indexService = newIndexService(); + IndexService indexService = createIndex("test", Settings.EMPTY); ensureGreen("test"); IndexService.AsyncRefreshTask refreshTask = indexService.getRefreshTask(); assertEquals(1000, refreshTask.getInterval().millis()); assertTrue(indexService.getRefreshTask().mustReschedule()); // now disable - IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, -1)).build(); + IndexMetaData metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)).build(); indexService.updateMetaData(metaData); client().prepareIndex("test", "test", "1").setSource("{\"foo\": \"bar\"}").get(); IndexShard shard = indexService.getShard(0); @@ -299,7 +293,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { assertEquals(0, search.totalHits); } // refresh every millisecond - metaData = IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL, "1ms")).build(); + metaData = 
IndexMetaData.builder(indexService.getMetaData()).settings(Settings.builder().put(indexService.getMetaData().getSettings()).put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "1ms")).build(); indexService.updateMetaData(metaData); assertBusy(() -> { try (Engine.Searcher searcher = shard.acquireSearcher("test")) { @@ -313,8 +307,8 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testAsyncFsyncActuallyWorks() throws Exception { Settings settings = Settings.builder() - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, "10ms") // very often :) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC) + .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "10ms") // very often :) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC) .build(); IndexService indexService = createIndex("test", settings); ensureGreen("test"); @@ -328,7 +322,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testNoFsyncTaskIfDisabled() { Settings settings = Settings.builder() - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, "0ms") // disable + .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "0ms") // disable .build(); IndexService indexService = createIndex("test", settings); assertNull(indexService.getFsyncTask()); diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 316badf376b..56179d5390b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -20,16 +20,22 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; 
import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; +import java.util.function.Function; public class IndexSettingsTests extends ESTestCase { @@ -38,13 +44,14 @@ public class IndexSettingsTests extends ESTestCase { Version version = VersionUtils.getPreviousVersion(); Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); - Consumer settingsConsumer = (s) -> integer.set(s.getAsInt("index.test.setting.int", -1)); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, Setting.Scope.INDEX); IndexMetaData metaData = newIndexMeta("index", theSettings); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.singleton(settingsConsumer)); + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); + assertEquals(version, settings.getIndexVersionCreated()); assertEquals("0xdeadbeef", settings.getUUID()); - assertEquals(1, settings.getUpdateListeners().size()); assertFalse(settings.updateIndexMetaData(metaData)); assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); assertEquals(0, integer.get()); @@ -58,11 +65,12 @@ public class IndexSettingsTests extends ESTestCase { 
.put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); final AtomicInteger integer = new AtomicInteger(0); final StringBuilder builder = new StringBuilder(); - Consumer settingsConsumer = (s) -> { - integer.set(s.getAsInt("index.test.setting.int", -1)); - builder.append(s.get("index.not.updated", "")); - }; - IndexSettings settings = new IndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, Collections.singleton(settingsConsumer)); + Setting integerSetting = Setting.intSetting("index.test.setting.int", -1, true, Setting.Scope.INDEX); + Setting notUpdated = new Setting<>("index.not.updated", "", Function.identity(), true, Setting.Scope.INDEX); + + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting, notUpdated); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set); + settings.getScopedSettings().addSettingsUpdateConsumer(notUpdated, builder::append); assertEquals(0, integer.get()); assertEquals("", builder.toString()); IndexMetaData newMetaData = newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings()).put("index.test.setting.int", 42).build()); @@ -73,30 +81,14 @@ public class IndexSettingsTests extends ESTestCase { integer.set(0); assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings()).put("index.not.updated", "boom").build()))); assertEquals("boom", builder.toString()); - assertEquals(42, integer.get()); + assertEquals("not updated - we preserve the old settings", 0, integer.get()); } - public void testListenerCanThrowException() { - Version version = VersionUtils.getPreviousVersion(); - Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build(); - final AtomicInteger integer = new AtomicInteger(0); - Consumer settingsConsumer = (s) -> 
integer.set(s.getAsInt("index.test.setting.int", -1)); - Consumer exceptionConsumer = (s) -> {throw new RuntimeException("boom");}; - List> list = new ArrayList<>(); - list.add(settingsConsumer); - list.add(exceptionConsumer); - Collections.shuffle(list, random()); - IndexSettings settings = new IndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, list); - assertEquals(0, integer.get()); - assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 42).build()))); - assertEquals(42, integer.get()); - } - public void testSettingsConsistency() { Version version = VersionUtils.getPreviousVersion(); IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); assertEquals(version, settings.getIndexVersionCreated()); assertEquals("_na_", settings.getUUID()); try { @@ -107,7 +99,7 @@ public class IndexSettingsTests extends ESTestCase { } metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build()); - settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + settings = new IndexSettings(metaData, Settings.EMPTY); try { settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put("index.test.setting.int", 42).build())); fail("uuid missing/change"); @@ -117,24 +109,31 @@ public class IndexSettingsTests extends ESTestCase { assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap()); } + public IndexSettings newIndexSettings(IndexMetaData metaData, Settings nodeSettings, Setting... 
settings) { + Set> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + if (settings.length > 0) { + settingSet.addAll(Arrays.asList(settings)); + } + return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet)); + } + public void testNodeSettingsAreContained() { final int numShards = randomIntBetween(1, 10); final int numReplicas = randomIntBetween(0, 10); Settings theSettings = Settings.settingsBuilder(). - put("index.foo.bar", 42) + put("index.foo.bar", 0) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build(); - Settings nodeSettings = Settings.settingsBuilder().put("node.foo.bar", 43).build(); + Settings nodeSettings = Settings.settingsBuilder().put("index.foo.bar", 43).build(); final AtomicInteger indexValue = new AtomicInteger(0); - final AtomicInteger nodeValue = new AtomicInteger(0); - Consumer settingsConsumer = (s) -> {indexValue.set(s.getAsInt("index.foo.bar", -1)); nodeValue.set(s.getAsInt("node.foo.bar", -1));}; - IndexSettings settings = new IndexSettings(newIndexMeta("index", theSettings), nodeSettings, Collections.singleton(settingsConsumer)); + Setting integerSetting = Setting.intSetting("index.foo.bar", -1, true, Setting.Scope.INDEX); + IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), nodeSettings, integerSetting); + settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, indexValue::set); assertEquals(numReplicas, settings.getNumberOfReplicas()); assertEquals(numShards, settings.getNumberOfShards()); assertEquals(0, indexValue.get()); - assertEquals(0, nodeValue.get()); assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.settingsBuilder(). 
put("index.foo.bar", 42) @@ -142,13 +141,16 @@ public class IndexSettingsTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build()))); assertEquals(42, indexValue.get()); - assertEquals(43, nodeValue.get()); assertSame(nodeSettings, settings.getNodeSettings()); + assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas + 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build()))); + assertEquals(43, indexValue.get()); } - private IndexMetaData newIndexMeta(String name, Settings indexSettings) { + public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) @@ -162,19 +164,124 @@ public class IndexSettingsTests extends ESTestCase { public void testUpdateDurability() { IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "async") + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async") .build()); - IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); assertEquals(Translog.Durability.ASYNC, settings.getTranslogDurability()); - settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, "request").build())); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "request").build())); assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); metaData = newIndexMeta("index", Settings.settingsBuilder() 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); - settings = new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + settings = new IndexSettings(metaData, Settings.EMPTY); assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); // test default } + public void testIsWarmerEnabled() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertFalse(settings.isWarmerEnabled()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(), "true").build())); + assertTrue(settings.isWarmerEnabled()); + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertTrue(settings.isWarmerEnabled()); + } + public void testRefreshInterval() { + String refreshInterval = getRandomTimeString(); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(TimeValue.parseTimeValue(refreshInterval, new TimeValue(1, TimeUnit.DAYS), IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval()); + String newRefreshInterval = getRandomTimeString(); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), newRefreshInterval).build())); + assertEquals(TimeValue.parseTimeValue(newRefreshInterval, new TimeValue(1, TimeUnit.DAYS), 
IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval()); + } + + private String getRandomTimeString() { + int refreshIntervalInt= randomFrom(-1, Math.abs(randomInt())); + String refreshInterval = Integer.toString(refreshIntervalInt); + if (refreshIntervalInt >= 0) { + refreshInterval += randomFrom("s", "ms", "h"); + } + return refreshInterval; + } + + public void testMaxResultWindow() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), 15) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(15, settings.getMaxResultWindow()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), 42).build())); + assertEquals(42, settings.getMaxResultWindow()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow()); + } + + public void testGCDeletesSetting() { + TimeValue gcDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeleteSetting.getStringRep()) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(TimeValue.parseTimeValue(gcDeleteSetting.getStringRep(), new TimeValue(1, 
TimeUnit.DAYS), IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis()); + TimeValue newGCDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), newGCDeleteSetting.getStringRep()).build())); + assertEquals(TimeValue.parseTimeValue(newGCDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS), IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), randomBoolean() ? -1 : new TimeValue(-1, TimeUnit.MILLISECONDS)).build())); + assertEquals(-1, settings.getGcDeletesInMillis()); + } + + public void testIsTTLPurgeDisabled() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertFalse(settings.isTTLPurgeDisabled()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(), "true").build())); + assertTrue(settings.isTTLPurgeDisabled()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertFalse("reset to default", settings.isTTLPurgeDisabled()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + assertFalse(settings.isTTLPurgeDisabled()); + } + + public void testTranslogFlushSizeThreshold() { + ByteSizeValue translogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt())); + ByteSizeValue actualValue = 
ByteSizeValue.parseBytesSizeValue(translogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey()); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), translogFlushThresholdSize.toString()) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + assertEquals(actualValue, settings.getFlushThresholdSize()); + ByteSizeValue newTranslogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt())); + ByteSizeValue actualNewTranslogFlushThresholdSize = ByteSizeValue.parseBytesSizeValue(newTranslogFlushThresholdSize.toString(), IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), newTranslogFlushThresholdSize.toString()).build())); + assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index 69f2e0a6d4a..5d54f7731c2 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShadowIndexShard; import org.elasticsearch.index.translog.TranslogStats; @@ -86,7 +87,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { 
private Settings nodeSettings(String dataPath) { return Settings.builder() .put("node.add_id_to_custom_path", false) - .put("path.shared_data", dataPath) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath) .put("index.store.fs.fs_lock", randomFrom("native", "simple")) .build(); } @@ -181,7 +182,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { Settings idxSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) .put(IndexMetaData.SETTING_DATA_PATH, dataPath.toAbsolutePath().toString()) .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true) .put(IndexMetaData.SETTING_SHARED_FILESYSTEM, true) @@ -443,7 +444,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { Path dataPath = createTempDir(); Settings nodeSettings = Settings.builder() .put("node.add_id_to_custom_path", false) - .put("path.shared_data", dataPath) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath) .build(); String node1 = internalCluster().startNode(nodeSettings); diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index e39c0a805fe..8fd6876b4b2 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -22,7 +22,11 @@ package org.elasticsearch.index; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.IntField; import org.apache.lucene.document.StringField; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; +import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexingSlowLog.SlowLogParsedDocumentPrinter; import org.elasticsearch.index.mapper.ParsedDocument; @@ -51,4 +55,155 @@ public class IndexingSlowLogTests extends ESTestCase { p = new SlowLogParsedDocumentPrinter(pd, 10, true, 3); assertThat(p.toString(), containsString("source[{\"f]")); } + + public void testReformatSetting() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + IndexingSlowLog log = new IndexingSlowLog(settings); + assertFalse(log.isReformat()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "true").build())); + assertTrue(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "false").build())); + assertFalse(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertTrue(log.isReformat()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new IndexingSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_REFORMAT_SETTING.getKey(), "NOT A BOOLEAN").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse value [NOT A BOOLEAN] cannot be parsed to boolean 
[ true/1/on/yes OR false/0/off/no ]"); + } + assertTrue(log.isReformat()); + } + + public void testLevelSetting() { + SlowLogLevel level = randomFrom(SlowLogLevel.values()); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + IndexingSlowLog log = new IndexingSlowLog(settings); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build())); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new IndexingSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_LEVEL_SETTING.getKey(), "NOT A LEVEL").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "No enum constant org.elasticsearch.index.SlowLogLevel.NOT A LEVEL"); + } + 
assertEquals(SlowLogLevel.TRACE, log.getLevel()); + } + + public void testSetLevels() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "100ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "200ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "300ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "400ms") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + IndexingSlowLog log = new IndexingSlowLog(settings); + assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getIndexWarnThreshold()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "120ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "220ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "320ms") + .put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "420ms").build())); + + + assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getIndexWarnThreshold()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings.updateIndexMetaData(metaData); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexWarnThreshold()); + + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new IndexingSlowLog(settings); + + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getIndexWarnThreshold()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_INFO_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch 
(IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_THRESHOLD_INDEX_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.indexing.slowlog.threshold.index.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + } + + private IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } } diff --git a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java index afc034844ef..1e3e51c0838 100644 --- a/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/MergePolicySettingsTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.index; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.TieredMergePolicy; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -31,106 +33,106 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import static 
org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; +import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta; import static org.hamcrest.Matchers.equalTo; public class MergePolicySettingsTests extends ESTestCase { protected final ShardId shardId = new ShardId(new Index("index"), 1); public void testCompoundFileSettings() throws IOException { - assertThat(new MergePolicyConfig(logger, EMPTY_SETTINGS).getMergePolicy().getNoCFSRatio(), equalTo(0.1)); - assertThat(new MergePolicyConfig(logger, build(true)).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build(0.5)).getMergePolicy().getNoCFSRatio(), equalTo(0.5)); - assertThat(new MergePolicyConfig(logger, build(1.0)).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build("true")).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build("True")).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); - assertThat(new MergePolicyConfig(logger, build("False")).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build("false")).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build(false)).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build(0)).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); - assertThat(new MergePolicyConfig(logger, build(0.0)).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(Settings.EMPTY)).getMergePolicy().getNoCFSRatio(), equalTo(0.1)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(true))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(0.5))).getMergePolicy().getNoCFSRatio(), equalTo(0.5)); + assertThat(new MergePolicyConfig(logger, 
indexSettings(build(1.0))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("true"))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("True"))).getMergePolicy().getNoCFSRatio(), equalTo(1.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("False"))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build("false"))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(false))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(0))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + assertThat(new MergePolicyConfig(logger, indexSettings(build(0.0))).getMergePolicy().getNoCFSRatio(), equalTo(0.0)); + } + + private static IndexSettings indexSettings(Settings settings) { + return new IndexSettings(newIndexMeta("test", settings), Settings.EMPTY); } public void testNoMerges() { - MergePolicyConfig mp = new MergePolicyConfig(logger, Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false).build()); + MergePolicyConfig mp = new MergePolicyConfig(logger, indexSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_ENABLED, false).build())); assertTrue(mp.getMergePolicy() instanceof NoMergePolicy); } public void testUpdateSettings() throws IOException { - { - MergePolicyConfig mp = new MergePolicyConfig(logger, EMPTY_SETTINGS); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(0.1)); - - mp.onRefreshSettings(build(1.0)); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(1.0)); - - mp.onRefreshSettings(build(0.1)); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(0.1)); - - mp.onRefreshSettings(build(0.0)); - assertThat((mp.getMergePolicy()).getNoCFSRatio(), equalTo(0.0)); - } - - + IndexSettings indexSettings = 
indexSettings(EMPTY_SETTINGS); + assertThat(indexSettings.getMergePolicy().getNoCFSRatio(), equalTo(0.1)); + indexSettings = indexSettings(build(0.9)); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.9)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build(0.1))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.1)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build(0.0))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.0)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build("true"))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(1.0)); + indexSettings.updateIndexMetaData(newIndexMeta("index", build("false"))); + assertThat((indexSettings.getMergePolicy()).getNoCFSRatio(), equalTo(0.0)); } public void testTieredMergePolicySettingsUpdate() throws IOException { - MergePolicyConfig mp = new MergePolicyConfig(logger, EMPTY_SETTINGS); - assertThat(mp.getMergePolicy().getNoCFSRatio(), equalTo(0.1)); + IndexSettings indexSettings = indexSettings(Settings.EMPTY); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED, MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING.getKey(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d).build())); + 
assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getFloorSegmentMB(), MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mbFrac(), 0); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT, new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB)).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB).mbFrac(), 0.001); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mbFrac(), 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_FLOOR_SEGMENT_SETTING.getKey(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB)).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB).mbFrac(), 0.001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE-1); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 
1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE - 1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT, MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT-1); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_EXPLICIT_SETTING.getKey(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT - 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT-1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergedSegmentMB(), MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.mbFrac(), 0.0001); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT, new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1)).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.mbFrac(), 0.0001); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING.getKey(), new 
ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1)).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT, MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING.getKey(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); - mp.onRefreshSettings(Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build()); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), 
MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1).build())); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); - mp.onRefreshSettings(EMPTY_SETTINGS); // update without the settings and see if we stick to the values - - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED + 1.0d, 0.0d); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb() + 1, ByteSizeUnit.MB).mbFrac(), 0.001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE-1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT-1); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT + 1, 0); - assertEquals(((TieredMergePolicy) mp.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER + 1, 0); + indexSettings.updateIndexMetaData(newIndexMeta("index", EMPTY_SETTINGS)); // see if defaults are restored + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getForceMergeDeletesPctAllowed(), MergePolicyConfig.DEFAULT_EXPUNGE_DELETES_ALLOWED, 0.0d); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getFloorSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_FLOOR_SEGMENT.mb(), ByteSizeUnit.MB).mbFrac(), 0.00); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergeAtOnce(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE); + assertEquals(((TieredMergePolicy) 
indexSettings.getMergePolicy()).getMaxMergeAtOnceExplicit(), MergePolicyConfig.DEFAULT_MAX_MERGE_AT_ONCE_EXPLICIT); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getMaxMergedSegmentMB(), new ByteSizeValue(MergePolicyConfig.DEFAULT_MAX_MERGED_SEGMENT.bytes() + 1).mbFrac(), 0.0001); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getReclaimDeletesWeight(), MergePolicyConfig.DEFAULT_RECLAIM_DELETES_WEIGHT, 0); + assertEquals(((TieredMergePolicy) indexSettings.getMergePolicy()).getSegmentsPerTier(), MergePolicyConfig.DEFAULT_SEGMENTS_PER_TIER, 0); } public Settings build(String value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } public Settings build(double value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } public Settings build(int value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } public Settings build(boolean value) { - return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, value).build(); + return Settings.builder().put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), value).build(); } diff --git a/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java new file mode 100644 index 00000000000..15bf60baab4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -0,0 +1,253 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + + +public class SearchSlowLogTests extends ESTestCase { + + public void testReformatSetting() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), false) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertFalse(log.isReformat()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), "true").build())); + assertTrue(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), "false").build())); + assertFalse(log.isReformat()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertTrue(log.isReformat()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() 
+ .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_REFORMAT.getKey(), "NOT A BOOLEAN").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse value [NOT A BOOLEAN] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]"); + } + assertTrue(log.isReformat()); + } + + public void testLevelSetting() { + SlowLogLevel level = randomFrom(SlowLogLevel.values()); + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level) + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level).build())); + assertEquals(level, log.getLevel()); + level = randomFrom(SlowLogLevel.values()); + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), level).build())); + assertEquals(level, log.getLevel()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY)); + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings = 
new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + assertTrue(log.isReformat()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_LEVEL.getKey(), "NOT A LEVEL").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "No enum constant org.elasticsearch.index.SlowLogLevel.NOT A LEVEL"); + } + assertEquals(SlowLogLevel.TRACE, log.getLevel()); + } + + public void testSetQueryLevels() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "100ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "200ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "300ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "400ms") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getQueryWarnThreshold()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "120ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "220ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "320ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "420ms").build())); + + + 
assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getQueryWarnThreshold()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings.updateIndexMetaData(metaData); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryWarnThreshold()); + + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getQueryWarnThreshold()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_TRACE_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + 
assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_INFO_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_QUERY_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.query.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + } + + public void testSetFetchLevels() { + IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "100ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "200ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "300ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "400ms") + .build()); + IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY); + SearchSlowLog log = new SearchSlowLog(settings); + assertEquals(TimeValue.timeValueMillis(100).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(200).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(300).nanos(), log.getFetchInfoThreshold()); + 
assertEquals(TimeValue.timeValueMillis(400).nanos(), log.getFetchWarnThreshold()); + + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "120ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "220ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "320ms") + .put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "420ms").build())); + + + assertEquals(TimeValue.timeValueMillis(120).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(220).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(320).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(420).nanos(), log.getFetchWarnThreshold()); + + metaData = newIndexMeta("index", Settings.settingsBuilder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .build()); + settings.updateIndexMetaData(metaData); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchWarnThreshold()); + + settings = new IndexSettings(metaData, Settings.EMPTY); + log = new SearchSlowLog(settings); + + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchTraceThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchDebugThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchInfoThreshold()); + assertEquals(TimeValue.timeValueMillis(-1).nanos(), log.getFetchWarnThreshold()); + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_TRACE_SETTING.getKey(), "NOT A 
TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.trace] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_DEBUG_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.debug] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_INFO_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.info] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + + try { + settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(SearchSlowLog.INDEX_SEARCH_SLOWLOG_THRESHOLD_FETCH_WARN_SETTING.getKey(), "NOT A TIME VALUE").build())); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals(ex.getMessage(), "Failed to parse setting [index.search.slowlog.threshold.fetch.warn] with value [NOT A TIME VALUE] as a time value: unit is missing or unrecognized"); + } + } + + + private IndexMetaData newIndexMeta(String name, Settings indexSettings) { + Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(indexSettings) + .build(); + IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); + return metaData; + } +} diff --git 
a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index f62d44df43f..7dbff244fcc 100644 --- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -18,18 +18,17 @@ */ package org.elasticsearch.index; -import org.elasticsearch.cluster.ClusterModule; -import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import java.util.Collection; import java.util.Collections; -import java.util.function.Consumer; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -44,7 +43,7 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsListenerPlugin extends Plugin { private final SettingsTestingService service = new SettingsTestingService(); - + private static final Setting SETTING = Setting.intSetting("index.test.new.setting", 0, true, Setting.Scope.INDEX); /** * The name of the plugin. 
*/ @@ -61,15 +60,15 @@ public class SettingsListenerIT extends ESIntegTestCase { return "Settings Listenern Plugin"; } - public void onModule(ClusterModule clusterModule) { - clusterModule.registerIndexDynamicSetting("index.test.new.setting", Validator.INTEGER); + public void onModule(SettingsModule settingsModule) { + settingsModule.registerSetting(SettingsTestingService.VALUE); } @Override public void onIndexModule(IndexModule module) { if (module.getIndex().getName().equals("test")) { // only for the test index - module.addIndexSettingsListener(service); - service.accept(module.getSettings()); + module.addSettingsUpdateConsumer(SettingsTestingService.VALUE, service::setValue); + service.setValue(SettingsTestingService.VALUE.get(module.getSettings())); } } @@ -92,13 +91,14 @@ public class SettingsListenerIT extends ESIntegTestCase { } } - public static class SettingsTestingService implements Consumer { + public static class SettingsTestingService { public volatile int value; + public static Setting VALUE = Setting.intSetting("index.test.new.setting", -1, -1, true, Setting.Scope.INDEX); - @Override - public void accept(Settings settings) { - value = settings.getAsInt("index.test.new.setting", -1); + public void setValue(int value) { + this.value = value; } + } public void testListener() { @@ -130,5 +130,13 @@ public class SettingsListenerIT extends ESIntegTestCase { for (SettingsTestingService instance : internalCluster().getInstances(SettingsTestingService.class)) { assertEquals(42, instance.value); } + + try { + client().admin().indices().prepareUpdateSettings("other").setSettings(Settings.builder() + .put("index.test.new.setting", -5)).get(); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [-5] for setting [index.test.new.setting] must be >= -1", ex.getMessage()); + } } } diff --git a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java 
b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java index 1f083466896..9dfeb4438ad 100644 --- a/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/index/TransportIndexFailuresIT.java @@ -53,8 +53,8 @@ public class TransportIndexFailuresIT extends ESIntegTestCase { private static final Settings nodeSettings = Settings.settingsBuilder() .put("discovery.type", "zen") // <-- To override the local setting if set externally - .put(FaultDetection.SETTING_PING_TIMEOUT, "1s") // <-- for hitting simulated network failures quickly - .put(FaultDetection.SETTING_PING_RETRIES, "1") // <-- for hitting simulated network failures quickly + .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly + .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // <-- for hitting simulated network failures quickly .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting simulated network failures quickly .put("discovery.zen.minimum_master_nodes", 1) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java index 17bd9d587b3..ba3f8b2e100 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/ASCIIFoldingTokenFilterFactoryTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -31,7 +32,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class ASCIIFoldingTokenFilterFactoryTests extends 
ESTokenStreamTestCase { public void testDefault() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_ascii_folding"); @@ -44,7 +45,7 @@ public class ASCIIFoldingTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testPreserveOriginal() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_ascii_folding.type", "asciifolding") .put("index.analysis.filter.my_ascii_folding.preserve_original", true) .build()); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java index b2df4a9d416..afb34cda8a0 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java @@ -179,6 +179,8 @@ public class AnalysisFactoryTests extends ESTestCase { put("typeaspayload", Void.class); // fingerprint put("fingerprint", Void.class); + // for tee-sinks + put("daterecognizer", Void.class); }}; public void testTokenFilters() { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java index f844d9ac7a6..5da54158484 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisModuleTests.java @@ 
-81,7 +81,7 @@ public class AnalysisModuleTests extends ModuleTestCase { private Settings loadFromClasspath(String path) { return settingsBuilder().loadFromStream(path, getClass().getResourceAsStream(path)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); } @@ -106,7 +106,7 @@ public class AnalysisModuleTests extends ModuleTestCase { String yaml = "/org/elasticsearch/index/analysis/test1.yml"; Settings settings2 = settingsBuilder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_0_90_0) .build(); AnalysisRegistry newRegistry = getNewRegistry(settings2); @@ -130,7 +130,7 @@ public class AnalysisModuleTests extends ModuleTestCase { private void assertTokenFilter(String name, Class clazz) throws IOException { Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter(name); Tokenizer tokenizer = new WhitespaceTokenizer(); @@ -215,7 +215,7 @@ public class AnalysisModuleTests extends ModuleTestCase { public void testWordListPath() throws Exception { Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); Environment env = new Environment(settings); String[] words = new String[]{"donau", "dampf", "schiff", "spargel", "creme", "suppe"}; @@ -243,7 +243,7 @@ public class 
AnalysisModuleTests extends ModuleTestCase { public void testUnderscoreInAnalyzerName() throws IOException { Settings settings = Settings.builder() .put("index.analysis.analyzer._invalid_name.tokenizer", "keyword") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, "1") .build(); try { @@ -258,7 +258,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Settings settings = Settings.builder() .put("index.analysis.analyzer.valid_name.tokenizer", "keyword") .put("index.analysis.analyzer.valid_name.alias", "_invalid_name") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, "1") .build(); try { @@ -275,7 +275,7 @@ public class AnalysisModuleTests extends ModuleTestCase { .put("index.analysis.analyzer.custom1.position_offset_gap", "128") .put("index.analysis.analyzer.custom2.tokenizer", "standard") .put("index.analysis.analyzer.custom2.position_increment_gap", "256") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.V_1_7_1)) .build(); @@ -295,7 +295,7 @@ public class AnalysisModuleTests extends ModuleTestCase { .put("index.analysis.analyzer.custom.tokenizer", "standard") .put("index.analysis.analyzer.custom.position_offset_gap", "128") .put("index.analysis.analyzer.custom.position_increment_gap", "256") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.V_1_7_1)) .build(); @@ -312,7 +312,7 @@ public class AnalysisModuleTests extends ModuleTestCase { Settings settings = 
settingsBuilder() .put("index.analysis.analyzer.custom.tokenizer", "standard") .put("index.analysis.analyzer.custom.position_offset_gap", "128") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); try { @@ -326,7 +326,7 @@ public class AnalysisModuleTests extends ModuleTestCase { public void testRegisterHunspellDictionary() throws Exception { Settings settings = settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); AnalysisModule module = new AnalysisModule(new Environment(settings)); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java index f467aa289f8..3dfb0975ab4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisServiceTests.java @@ -53,7 +53,11 @@ public class AnalysisServiceTests extends ESTestCase { public void testDefaultAnalyzers() throws IOException { Version version = VersionUtils.randomVersion(getRandom()); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).put("path.home", createTempDir().toString()).build(); + Settings settings = Settings + .builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); assertThat(analysisService.defaultIndexAnalyzer().analyzer(), instanceOf(StandardAnalyzer.class)); @@ 
-123,7 +127,7 @@ public class AnalysisServiceTests extends ESTestCase { public void testConfigureCamelCaseTokenFilter() throws IOException { // tests a filter that - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.wordDelimiter.type", "word_delimiter") @@ -169,7 +173,7 @@ public class AnalysisServiceTests extends ESTestCase { } public void testCameCaseOverride() throws IOException { - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.wordDelimiter.type", "word_delimiter") @@ -196,7 +200,7 @@ public class AnalysisServiceTests extends ESTestCase { } public void testBuiltInAnalyzersAreCached() throws IOException { - Settings settings = Settings.builder().put("path.home", createTempDir().toString()).build(); + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); Settings indexSettings = settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), indexSettings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java index 1404716b0c8..7460ddd3e55 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java +++ 
b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTestsHelper.java @@ -37,7 +37,7 @@ public class AnalysisTestsHelper { public static AnalysisService createAnalysisServiceFromClassPath(Path baseDir, String resource) throws IOException { Settings settings = Settings.settingsBuilder() .loadFromStream(resource, AnalysisTestsHelper.class.getResourceAsStream(resource)) - .put("path.home", baseDir.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString()) .build(); return createAnalysisServiceFromSettings(settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java index 63acbc81c8d..a163d9e42b4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalyzerBackwardsCompatTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -41,7 +42,7 @@ public class AnalyzerBackwardsCompatTests extends ESTokenStreamTestCase { builder.put(SETTING_VERSION_CREATED, version); } builder.put("index.analysis.analyzer.foo.type", type); - builder.put("path.home", createTempDir().toString()); + builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); NamedAnalyzer analyzer = analysisService.analyzer("foo"); assertNotNull(analyzer); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java index dd08d470136..c39c6e702f4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/analysis/CharFilterTests.java @@ -40,7 +40,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .putArray("index.analysis.char_filter.my_mapping.mappings", "ph=>f", "qu=>q") .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard") .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "my_mapping") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); @@ -58,7 +58,7 @@ public class CharFilterTests extends ESTokenStreamTestCase { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.analyzer.custom_with_char_filter.tokenizer", "standard") .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "html_strip") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, settings); AnalysisService analysisService = new AnalysisRegistry(null, new Environment(settings)).build(idxSettings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java index a097d55f4a3..e00f5f67d87 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CompoundAnalysisTests.java @@ -98,7 +98,7 @@ public class CompoundAnalysisTests extends ESTestCase { return settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) 
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); } @@ -107,7 +107,7 @@ public class CompoundAnalysisTests extends ESTestCase { return settingsBuilder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); } } diff --git a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java index 02c4e1a2642..51d8b9214ad 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/HunspellTokenFilterFactoryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -30,8 +31,8 @@ import static org.hamcrest.Matchers.is; public class HunspellTokenFilterFactoryTests extends ESTestCase { public void testDedup() throws IOException { Settings settings = settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put("index.analysis.filter.en_US.type", "hunspell") .put("index.analysis.filter.en_US.locale", "en_US") .build(); @@ -43,8 +44,8 @@ public class HunspellTokenFilterFactoryTests extends ESTestCase { assertThat(hunspellTokenFilter.dedup(), is(true)); settings = settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) + 
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) .put("index.analysis.filter.en_US.type", "hunspell") .put("index.analysis.filter.en_US.dedup", false) .put("index.analysis.filter.en_US.locale", "en_US") diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java index 99c936cd346..a7179daff2a 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepFilterFactoryTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import org.junit.Assert; @@ -41,7 +42,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase { public void testLoadOverConfiguredSettings() { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.broken_keep_filter.type", "keep") .put("index.analysis.filter.broken_keep_filter.keep_words_path", "does/not/exists.txt") .put("index.analysis.filter.broken_keep_filter.keep_words", "[\"Hello\", \"worlD\"]") @@ -57,7 +58,7 @@ public class KeepFilterFactoryTests extends ESTokenStreamTestCase { public void testKeepWordsPathSettings() { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.non_broken_keep_filter.type", "keep") 
.put("index.analysis.filter.non_broken_keep_filter.keep_words_path", "does/not/exists.txt") .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java index 1e8a0ba16ed..9111c929f95 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/KeepTypesFilterFactoryTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -32,7 +33,7 @@ import static org.hamcrest.Matchers.instanceOf; public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase { public void testKeepTypes() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.keep_numbers.type", "keep_types") .putArray("index.analysis.filter.keep_numbers.types", new String[] {"", ""}) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java index e133ffc79ae..b266be9f2bd 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/LimitTokenCountFilterFactoryTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -31,7 +32,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.limit_default.type", "limit") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); { @@ -58,7 +59,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.limit_1.type", "limit") .put("index.analysis.filter.limit_1.max_token_count", 3) .put("index.analysis.filter.limit_1.consume_all_tokens", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); @@ -73,7 +74,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.limit_1.type", "limit") .put("index.analysis.filter.limit_1.max_token_count", 3) .put("index.analysis.filter.limit_1.consume_all_tokens", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); @@ -89,7 +90,7 @@ public class LimitTokenCountFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.limit_1.type", "limit") 
.put("index.analysis.filter.limit_1.max_token_count", 17) .put("index.analysis.filter.limit_1.consume_all_tokens", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("limit_1"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java index 4b7119df01b..8c6775a92ab 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternCaptureTokenFilterTests.java @@ -35,7 +35,7 @@ public class PatternCaptureTokenFilterTests extends ESTokenStreamTestCase { public void testPatternCaptureTokenFilter() throws Exception { String json = "/org/elasticsearch/index/analysis/pattern_capture.json"; Settings settings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 1a88fcbc0c1..f4792f0bc37 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -27,9 +27,12 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; +import 
org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Collection; import java.util.Locale; import static org.elasticsearch.test.VersionUtils.randomVersion; @@ -40,6 +43,12 @@ import static org.hamcrest.Matchers.is; * */ public class PreBuiltAnalyzerTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testThatDefaultAndStandardAnalyzerAreTheSameInstance() { Analyzer currentStandardAnalyzer = PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT); Analyzer currentDefaultAnalyzer = PreBuiltAnalyzers.DEFAULT.getAnalyzer(Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java index 737a991f0e0..37844dce69d 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StemmerTokenFilterFactoryTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.analysis.en.PorterStemFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.VersionUtils; @@ -50,7 +51,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.analyzer.my_english.tokenizer","whitespace") .put("index.analysis.analyzer.my_english.filter","my_english") .put(SETTING_VERSION_CREATED,v) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = 
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -83,7 +84,7 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.analyzer.my_porter2.tokenizer","whitespace") .put("index.analysis.analyzer.my_porter2.filter","my_porter2") .put(SETTING_VERSION_CREATED,v) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java index 90e55e98d7e..ebaf4cb5cc4 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopAnalyzerTests.java @@ -35,7 +35,7 @@ public class StopAnalyzerTests extends ESTokenStreamTestCase { String json = "/org/elasticsearch/index/analysis/stop.json"; Settings settings = settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java index 1dbd9ac2bd9..2804f522afa 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/StopTokenFilterTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter; import org.apache.lucene.util.Version; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -44,7 +45,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { if (random().nextBoolean()) { builder.put("index.analysis.filter.my_stop.version", "5.0"); } - builder.put("path.home", createTempDir().toString()); + builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); Settings settings = builder.build(); try { AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -67,7 +68,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { } else { // don't specify } - builder.put("path.home", createTempDir().toString()); + builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(builder.build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); assertThat(tokenFilter, instanceOf(StopTokenFilterFactory.class)); @@ -86,7 +87,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { .put("index.analysis.filter.my_stop.type", "stop") .put("index.analysis.filter.my_stop.enable_position_increments", false) .put("index.analysis.filter.my_stop.version", "4.3") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); @@ -101,7 +102,7 @@ public class StopTokenFilterTests extends ESTokenStreamTestCase { Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.my_stop.type", "stop") .put("index.analysis.filter.my_stop.remove_trailing", false) - .put("path.home", 
createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_stop"); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java index 54810028ae3..a041694dde6 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/WordDelimiterTokenFilterFactoryTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -31,7 +32,7 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder; public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .build()); TokenFilterFactory tokenFilter = analysisService.tokenFilter("my_word_delimiter"); @@ -44,7 +45,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testCatenateWords() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") @@ -59,7 +60,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testCatenateNumbers() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") .put("index.analysis.filter.my_word_delimiter.catenate_numbers", "true") @@ -74,7 +75,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testCatenateAll() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "false") .put("index.analysis.filter.my_word_delimiter.generate_number_parts", "false") @@ -90,7 +91,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testSplitOnCaseChange() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.split_on_case_change", "false") 
.build()); @@ -104,7 +105,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testPreserveOriginal() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.preserve_original", "true") .build()); @@ -118,7 +119,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase public void testStemEnglishPossessive() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.stem_english_possessive", "false") .build()); @@ -133,7 +134,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase /** Correct offset order when doing both parts and concatenation: PowerShot is a synonym of Power */ public void testPartsAndCatenate() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") @@ -150,7 +151,7 @@ public class WordDelimiterTokenFilterFactoryTests extends ESTokenStreamTestCase * old offset order when doing both parts and concatenation: PowerShot is a synonym of Shot 
*/ public void testDeprecatedPartsAndCatenate() throws IOException { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settingsBuilder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("index.analysis.filter.my_word_delimiter.type", "word_delimiter") .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") diff --git a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java index f7c346c6570..a9d3c8820fc 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/commongrams/CommonGramsTokenFilterFactoryTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.TokenFilterFactory; @@ -38,7 +39,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { public void testDefault() throws IOException { Settings settings = Settings.settingsBuilder() .put("index.analysis.filter.common_grams_default.type", "common_grams") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); try { @@ -54,7 +55,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = 
Settings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams") .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -71,7 +72,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_default.type", "common_grams") .put("index.analysis.filter.common_grams_default.query_mode", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein") .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -90,7 +91,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_1.type", "common_grams") .put("index.analysis.filter.common_grams_1.ignore_case", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -104,7 +105,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_2.type", "common_grams") .put("index.analysis.filter.common_grams_2.ignore_case", false) - .put("path.home", createTempDir().toString()) + 
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -118,7 +119,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams") .putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3"); @@ -134,7 +135,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { String json = "/org/elasticsearch/index/analysis/commongrams/commongrams.json"; Settings settings = Settings.settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", createHome()) + .put(Environment.PATH_HOME_SETTING.getKey(), createHome()) .build(); { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); @@ -158,7 +159,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.common_grams_1.query_mode", true) .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are") .put("index.analysis.filter.common_grams_1.ignore_case", true) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = 
AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_1"); @@ -173,7 +174,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { .put("index.analysis.filter.common_grams_2.query_mode", true) .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") .put("index.analysis.filter.common_grams_2.ignore_case", false) - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_2"); @@ -187,7 +188,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_3.type", "common_grams") .put("index.analysis.filter.common_grams_3.query_mode", true) .putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_3"); @@ -201,7 +202,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { Settings settings = Settings.settingsBuilder().put("index.analysis.filter.common_grams_4.type", "common_grams") .put("index.analysis.filter.common_grams_4.query_mode", true) .putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir().toString()) .build(); AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); TokenFilterFactory tokenFilter = analysisService.tokenFilter("common_grams_4"); @@ -217,7 +218,7 @@ public class CommonGramsTokenFilterFactoryTests extends ESTokenStreamTestCase { String json = "/org/elasticsearch/index/analysis/commongrams/commongrams_query_mode.json"; Settings settings = Settings.settingsBuilder() .loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", createHome()) + .put(Environment.PATH_HOME_SETTING.getKey(), createHome()) .build(); { AnalysisService analysisService = AnalysisTestsHelper.createAnalysisServiceFromSettings(settings); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java index 3a6adca1c67..c4c664f222c 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java @@ -64,7 +64,7 @@ public class SynonymsAnalysisTests extends ESTestCase { String json = "/org/elasticsearch/index/analysis/synonyms/synonyms.json"; Settings settings = settingsBuilder(). 
loadFromStream(json, getClass().getResourceAsStream(json)) - .put("path.home", home) + .put(Environment.PATH_HOME_SETTING.getKey(), home) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index("index"), settings); diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 03a87dec232..c293237b5a0 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -60,6 +60,7 @@ import static org.hamcrest.Matchers.instanceOf; @SuppressCodecs("*") // we test against default codec so never get a random one here! public class CodecTests extends ESTestCase { + public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); @@ -105,7 +106,7 @@ public class CodecTests extends ESTestCase { private static CodecService createCodecService() throws IOException { Settings nodeSettings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); IndexSettings settings = IndexSettingsModule.newIndexSettings(new Index("_na"), nodeSettings); SimilarityService similarityService = new SimilarityService(settings, Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java index a873e61b1af..8a4da8e3ca5 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineSettingsTests.java @@ -43,9 +43,9 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { 
long gcDeletes = random().nextLong() & (Long.MAX_VALUE >> 11); Settings build = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, gcDeletes, TimeUnit.MILLISECONDS) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeletes, TimeUnit.MILLISECONDS) .build(); - assertEquals(gcDeletes, build.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING, null).millis()); + assertEquals(gcDeletes, build.getAsTime(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), null).millis()); client().admin().indices().prepareUpdateSettings("foo").setSettings(build).get(); LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig(); @@ -58,7 +58,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { } Settings settings = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, 1000, TimeUnit.MILLISECONDS) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS) .build(); client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get(); assertEquals(engine.getGcDeletesInMillis(), 1000); @@ -66,7 +66,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { settings = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, "0ms") + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "0ms") .build(); client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get(); @@ -74,7 +74,7 @@ public class InternalEngineSettingsTests extends ESSingleNodeTestCase { assertTrue(engine.config().isEnableGcDeletes()); settings = Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING, 1000, TimeUnit.MILLISECONDS) + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), 1000, TimeUnit.MILLISECONDS) .build(); client().admin().indices().prepareUpdateSettings("foo").setSettings(settings).get(); assertEquals(engine.getGcDeletesInMillis(), 1000); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java 
b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 56affa3db23..2b72018aa8e 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -166,7 +166,7 @@ public class InternalEngineTests extends ESTestCase { } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() .put(IndexSettings.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us - .put(EngineConfig.INDEX_CODEC_SETTING, codecName) + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); // TODO randomize more settings threadPool = new ThreadPool(getClass().getName()); @@ -1600,8 +1600,7 @@ public class InternalEngineTests extends ESTestCase { } // now it should be OK. IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetaData(), - Settings.builder().put(defaultSettings.getSettings()).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build(), - Collections.emptyList()); + Settings.builder().put(defaultSettings.getSettings()).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build()); engine = createEngine(indexSettings, store, primaryTranslogDir, newMergePolicy()); } diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 2eca00dc5a6..71ad0e16909 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -118,7 +118,7 @@ public class ShadowEngineTests extends ESTestCase { } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() .put(IndexSettings.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us - .put(EngineConfig.INDEX_CODEC_SETTING, codecName) + 
.put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build()); // TODO randomize more settings diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 70e3b66553c..07ae1e70a48 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -49,12 +49,15 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.Collection; import static org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import static org.hamcrest.Matchers.equalTo; @@ -84,6 +87,11 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { return getForField(type, fieldName, hasDocValues()); } + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public > IFD getForField(FieldDataType type, String fieldName, boolean docValues) { final MappedFieldType fieldType; final BuilderContext context = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java index 13f7f74e37b..35a74ea3849 100644 --- 
a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataCacheTests.java @@ -106,7 +106,7 @@ public class FieldDataCacheTests extends ESTestCase { .numberOfReplicas(0) .creationDate(System.currentTimeMillis()) .build(); - return new IndexSettings(indexMetaData, settings, Collections.emptyList()); + return new IndexSettings(indexMetaData, settings); } private class DummyAccountingFieldDataCache implements IndexFieldDataCache { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index eefe8c89183..1e0d8ecdf00 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -60,10 +61,10 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { @Before public void before() throws Exception { mapperService.merge( - childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true, false + childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), MapperService.MergeReason.MAPPING_UPDATE, false ); mapperService.merge( - grandChildType, new 
CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true, false + grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), MapperService.MergeReason.MAPPING_UPDATE, false ); Document d = new Document(); @@ -163,11 +164,11 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { } public void testSorting() throws Exception { - IndexFieldData indexFieldData = getForField(childType); + IndexFieldData indexFieldData = getForField(parentType); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true)); IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null); - TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.NAME, comparator, false))); + TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false))); assertThat(topDocs.totalHits, equalTo(8)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); @@ -187,7 +188,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { assertThat(topDocs.scoreDocs[7].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[7]).fields[0]), equalTo(null)); - topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.NAME, comparator, true))); + topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, true))); assertThat(topDocs.totalHits, equalTo(8)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index f6cfcec041a..f8859efa025 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -67,7 +67,7 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { public void setUp() throws Exception { super.setUp(); settings = Settings.builder() - .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false) + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false) .build(); clusterService = new TestClusterService(THREAD_POOL); transport = new LocalTransport(settings, THREAD_POOL, Version.CURRENT, new NamedWriteableRegistry()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 22a10ab8229..1a4fb0d9c4c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -482,7 +482,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endArray() .endObject().endObject(); - indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), true, false); + indexService.mapperService().merge("type1", new CompressedXContent(mappings1.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder mappings2 = jsonBuilder().startObject() .startObject("type2") .startObject("properties") @@ -491,7 +491,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject(); - indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), true, false); + indexService.mapperService().merge("type2", new CompressedXContent(mappings2.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); 
XContentBuilder json = XContentFactory.jsonBuilder().startObject() .field("field", "foo") @@ -502,7 +502,7 @@ public class DynamicMappingTests extends ESSingleNodeTestCase { ParsedDocument parsed = mapper.parse(source); assertNotNull(parsed.dynamicMappingsUpdate()); - indexService.mapperService().merge("type1", new CompressedXContent(parsed.dynamicMappingsUpdate().toString()), false, false); + indexService.mapperService().merge("type1", new CompressedXContent(parsed.dynamicMappingsUpdate().toString()), MapperService.MergeReason.MAPPING_UPDATE, false); mapper = indexService.mapperService().documentMapper("type1"); assertNotNull(mapper.mappers().getMapper("field.raw")); parsed = mapper.parse(source); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 94445d3aadb..7cc8e10bef7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -20,11 +20,13 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.Queries; @@ -40,6 +42,7 @@ import java.util.HashSet; import java.util.concurrent.ExecutionException; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; @@ -83,15 +86,15 @@ public class MapperServiceTests extends 
ESSingleNodeTestCase { MapperService mapperService = indexService1.mapperService(); assertEquals(Collections.emptySet(), mapperService.types()); - mapperService.merge("type1", new CompressedXContent("{\"type1\":{}}"), true, false); + mapperService.merge("type1", new CompressedXContent("{\"type1\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); assertNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); assertEquals(Collections.singleton("type1"), mapperService.types()); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent("{\"_default_\":{}}"), true, false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent("{\"_default_\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); assertEquals(Collections.singleton("type1"), mapperService.types()); - mapperService.merge("type2", new CompressedXContent("{\"type2\":{}}"), true, false); + mapperService.merge("type2", new CompressedXContent("{\"type2\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotNull(mapperService.documentMapper(MapperService.DEFAULT_MAPPING)); assertEquals(new HashSet<>(Arrays.asList("type1", "type2")), mapperService.types()); } @@ -103,7 +106,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { fail(); } catch (Throwable t) { if (t instanceof ExecutionException) { - t = ((ExecutionException) t).getCause(); + t = t.getCause(); } final Throwable throwable = ExceptionsHelper.unwrapCause(t); if (throwable instanceof IllegalArgumentException) { @@ -120,7 +123,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { fail(); } catch (Throwable t) { if (t instanceof ExecutionException) { - t = ((ExecutionException) t).getCause(); + t = t.getCause(); } final Throwable throwable = ExceptionsHelper.unwrapCause(t); if (throwable instanceof IllegalArgumentException) { @@ -132,21 +135,4 @@ public class MapperServiceTests 
extends ESSingleNodeTestCase { assertFalse(indexService.mapperService().hasMapping(MapperService.DEFAULT_MAPPING)); } - public void testSearchFilter() { - IndexService indexService = createIndex("index1", client().admin().indices().prepareCreate("index1") - .addMapping("type1", "field1", "type=nested") - .addMapping("type2", new Object[0]) - ); - - Query searchFilter = indexService.mapperService().searchFilter("type1", "type3"); - Query expectedQuery = new BooleanQuery.Builder() - .add(new BooleanQuery.Builder() - .add(new ConstantScoreQuery(new TermQuery(new Term(TypeFieldMapper.NAME, "type1"))), BooleanClause.Occur.SHOULD) - .add(new TermQuery(new Term(TypeFieldMapper.NAME, "type3")), BooleanClause.Occur.SHOULD) - .build(), BooleanClause.Occur.MUST - ) - .add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST) - .build(); - assertThat(searchFilter, equalTo(new ConstantScoreQuery(expectedQuery))); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 841249510d1..642e35eef88 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -43,14 +43,16 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matchers; import java.io.IOException; import java.util.ArrayList; +import 
java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -68,6 +70,11 @@ import static org.hamcrest.Matchers.nullValue; public class SimpleAllMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testSimpleAllMappers() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/mapping.json"); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java index b5a54ce92bd..0a0f746ccd5 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/CustomBoostMappingTests.java @@ -26,7 +26,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.equalTo; @@ -34,6 +38,11 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testBackCompatCustomBoostValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") 
.startObject("s_field").field("type", "string").endObject() @@ -68,4 +77,4 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java index bb5aecd9ec9..6b83ae51385 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/boost/FieldLevelBoostTests.java @@ -29,7 +29,11 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.closeTo; @@ -39,6 +43,11 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase { private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testBackCompatFieldLevelBoost() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") .startObject("str_field").field("type", "string").endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java index fa7bbf8f249..ea66eaa4c5c 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/compound/CompoundTypesTests.java @@ -26,13 +26,22 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; public class CompoundTypesTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); public void testBackCompatStringType() throws Exception { @@ -74,4 +83,4 @@ public class CompoundTypesTests extends ESSingleNodeTestCase { assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d)); assertThat(doc.rootDoc().get("field2"), equalTo("value2")); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java index daf54d501d7..d1de843e38f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/copyto/CopyToMapperTests.java @@ -312,11 +312,11 @@ public class CopyToMapperTests extends ESSingleNodeTestCase { .endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), 
true, false); + DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); - DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), false, false); + DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter), MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(Arrays.asList("baz", "bar"), docMapperAfter.mappers().getMapper("copy_test").copyTo().copyToFields()); assertEquals(Arrays.asList("foo", "bar"), docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java index 3aa04ba0f01..2175f2ce3e7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/BooleanFieldMapperTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; @@ -125,7 +126,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject() .endObject().endObject().string(); - DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), true, false); + DocumentMapper mapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), 
MapperService.MergeReason.MAPPING_UPDATE, false); assertEquals(mapping, mapper.mappingSource().toString()); BytesReference source = XContentFactory.jsonBuilder() .startObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java index a746717b73a..e8fe39e9bbb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperTests.java @@ -52,7 +52,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject() .startObject("person") @@ -63,7 +63,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { .endObject() .endObject() .endObject().endObject().string(); - DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), false, false); + DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); // previous mapper has not been modified assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java index ba05ea81054..c42924132ff 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/SimpleExternalMappingTests.java @@ -32,9 +32,12 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -46,6 +49,11 @@ import static org.hamcrest.Matchers.notNullValue; */ public class SimpleExternalMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testExternalValues() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 758e5a38294..2ea19b02450 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -35,11 +35,14 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHitField; import 
org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.geo.RandomGeoGenerator; +import java.util.Collection; import java.util.List; import java.util.Map; @@ -52,6 +55,12 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testLatLonValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() @@ -662,12 +671,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test", settings).mapperService(); - DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) .field("geohash", false).endObject().endObject().endObject().endObject().string(); try { - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper 
[point] has different [lat_lon]")); @@ -679,7 +688,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) .field("geohash", true).endObject().endObject().endObject().endObject().string(); - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); } public void testGeoHashSearch() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index af03d3accb1..0ad4dbd87ba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -376,13 +376,13 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01).field("orientation", "ccw") .endObject().endObject().endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), true, false); + DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("tree", "quadtree") .field("strategy", "term").field("precision", "1km").field("tree_levels", 26).field("distance_error_pct", 26) .field("orientation", 
"cw").endObject().endObject().endObject().endObject().string(); try { - mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]")); @@ -408,7 +408,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m") .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string(); - docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), false, false); + docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 9e0d7b596eb..c2cbee42cf2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -29,9 +29,13 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import 
org.elasticsearch.test.VersionUtils; +import java.util.Collection; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -42,6 +46,12 @@ import static org.hamcrest.Matchers.nullValue; * */ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testLatLonValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index 2e28f60e7d9..e23ea15b64f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -148,11 +148,11 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); - DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + DocumentMapper mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper mapperDisabled = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertFalse(mapperDisabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - mapperEnabled = mapperService.merge("type", new 
CompressedXContent(enabledMapping), false, false); + mapperEnabled = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertTrue(mapperEnabled.metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java index 0d52b66dfb6..7083d9fa6bc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/ParentFieldMapperTests.java @@ -47,10 +47,10 @@ public class ParentFieldMapperTests extends ESTestCase { assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true)); assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent")); - assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY)); - assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); + assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent")); + assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY)); + assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true)); + assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED)); } public void testPost2Dot0EagerLoading() { @@ -65,10 +65,10 @@ public class ParentFieldMapperTests extends ESTestCase { assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true)); assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - 
assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent")); - assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER)); - assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); + assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent")); + assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER)); + assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true)); + assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED)); } public void testPost2Dot0EagerGlobalOrdinalsLoading() { @@ -83,10 +83,10 @@ public class ParentFieldMapperTests extends ESTestCase { assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true)); assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent")); - assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS)); - assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); + assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent")); + assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS)); + assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true)); + assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED)); } private static Settings post2Dot0IndexSettings() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java index 309fa274919..77e7b46537e 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/internal/TypeFieldMapperTests.java @@ -25,10 +25,19 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Collection; public class TypeFieldMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testDocValues() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java index 82a8918c66b..5886b3e486f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ip/SimpleIpMappingTests.java @@ -38,6 +38,7 @@ import static org.hamcrest.Matchers.nullValue; * */ public class SimpleIpMappingTests extends ESSingleNodeTestCase { + public void testSimpleMapping() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 
80f7942bbcc..4614909568c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.io.IOException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -135,8 +136,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").field("analyzer", "standard").field("ignore_above", 14).endObject().endObject() .endObject().endObject().string(); - DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), true, false); - DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), false, false); + DocumentMapper existing = mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper merged = mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); @@ -146,7 +147,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { public void testConcurrentMergeTest() throws Throwable { final MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true, false); + mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), MapperService.MergeReason.MAPPING_UPDATE, false); final DocumentMapper documentMapper = mapperService.documentMapper("test"); DocumentFieldMappers dfm = 
documentMapper.mappers(); @@ -172,7 +173,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { Mapping update = doc.dynamicMappingsUpdate(); assert update != null; lastIntroducedFieldName.set(fieldName); - mapperService.merge("test", new CompressedXContent(update.toString()), false, false); + mapperService.merge("test", new CompressedXContent(update.toString()), MapperService.MergeReason.MAPPING_UPDATE, false); } } catch (Throwable t) { error.set(t); @@ -203,4 +204,28 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { throw error.get(); } } + + public void testDoNotRepeatOriginalMapping() throws IOException { + CompressedXContent mapping = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("_source") + .field("enabled", false) + .endObject() + .endObject().endObject().bytes()); + MapperService mapperService = createIndex("test").mapperService(); + mapperService.merge("type", mapping, MapperService.MergeReason.MAPPING_UPDATE, false); + + CompressedXContent update = new CompressedXContent(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("foo") + .field("type", "string") + .endObject() + .endObject() + .endObject().endObject().bytes()); + DocumentMapper mapper = mapperService.merge("type", update, MapperService.MergeReason.MAPPING_UPDATE, false); + + assertNotNull(mapper.mappers().getMapper("foo")); + assertFalse(mapper.sourceMapper().enabled()); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java index 651b8c45d55..e08d610d3f2 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/multifield/merge/JavaMultiFieldMergeTests.java @@ -42,7 +42,7 @@ public 
class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false); + DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -55,7 +55,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, nullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -72,7 +72,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -83,7 +83,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(docMapper.mappers().getMapper("name.not_indexed3"), nullValue()); mapping = 
copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -98,7 +98,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping1.json"); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), true, false); + DocumentMapper docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.indexed"), nullValue()); @@ -112,7 +112,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -129,7 +129,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { assertThat(f, notNullValue()); mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json"); - docMapper = mapperService.merge("person", new CompressedXContent(mapping), false, false); + docMapper = mapperService.merge("person", new 
CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions()); @@ -142,7 +142,7 @@ public class JavaMultiFieldMergeTests extends ESSingleNodeTestCase { mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json"); try { - mapperService.merge("person", new CompressedXContent(mapping), false, false); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [name] has different [index] values")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java index 6debfa05ee9..5c846b09ab9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/nested/NestedMappingTests.java @@ -20,14 +20,22 @@ package org.elasticsearch.index.mapper.nested; import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper.Dynamic; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.function.Function; + +import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -340,4 +348,58 @@ public class NestedMappingTests extends ESSingleNodeTestCase { assertThat(doc.docs().get(1).get("field"), nullValue()); assertThat(doc.docs().get(2).get("field"), equalTo("value")); } -} \ No newline at end of file + + public void testLimitOfNestedFieldsPerIndex() throws Exception { + Function mapping = type -> { + try { + return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") + .startObject("nested1").field("type", "nested").startObject("properties") + .startObject("nested2").field("type", "nested") + .endObject().endObject() + .endObject().endObject().endObject().string(); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; + + // default limit allows at least two nested fields + createIndex("test1").mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false); + + // explicitly setting limit to 0 prevents nested fields + try { + createIndex("test2", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build()) + .mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Limit of nested fields [0] in index [test2] has been exceeded")); + } + + // setting limit to 1 with 2 nested fields fails + try { + createIndex("test3", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1).build()) + .mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_UPDATE, false); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Limit of nested fields [1] in index [test3] has been exceeded")); + } + 
+ MapperService mapperService = createIndex("test4", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 2) + .build()).mapperService(); + mapperService.merge("type1", new CompressedXContent(mapping.apply("type1")), MergeReason.MAPPING_UPDATE, false); + // merging same fields, but different type is ok + mapperService.merge("type2", new CompressedXContent(mapping.apply("type2")), MergeReason.MAPPING_UPDATE, false); + // adding new fields from different type is not ok + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type3").startObject("properties").startObject("nested3") + .field("type", "nested").startObject("properties").endObject().endObject().endObject().endObject().string(); + try { + mapperService.merge("type3", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE, false); + fail("Expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("Limit of nested fields [2] in index [test4] has been exceeded")); + } + + // do not check nested fields limit if mapping is not updated + createIndex("test5", Settings.builder().put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build()) + .mapperService().merge("type", new CompressedXContent(mapping.apply("type")), MergeReason.MAPPING_RECOVERY, false); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java index 624978bf7d0..e68817e9ea0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/numeric/SimpleNumericTests.java @@ -41,10 +41,13 @@ import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; import 
org.elasticsearch.index.mapper.string.SimpleStringMappingTests; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -56,6 +59,12 @@ import static org.hamcrest.Matchers.nullValue; /** */ public class SimpleNumericTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testNumericDetectionEnabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .field("numeric_detection", true) diff --git a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java index f6bbde47e9d..41dd8d957b0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/parent/ParentMappingTests.java @@ -55,6 +55,6 @@ public class ParentMappingTests extends ESSingleNodeTestCase { .endObject() .bytes()).type("type").id("1").parent("1122")); - assertEquals(Uid.createUid("p_type", "1122"), doc.rootDoc().get("_parent")); + assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#p_type").utf8ToString()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 35b127b6283..f82dcb6eecc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ 
-33,10 +33,13 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; +import java.util.Collection; import java.util.Map; import static org.hamcrest.Matchers.containsString; @@ -44,6 +47,11 @@ import static org.hamcrest.Matchers.equalTo; public class DefaultSourceMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testNoFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").endObject() @@ -167,7 +175,7 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").getDocumentMapper(); assertThat(mapper.type(), equalTo("my_type")); @@ -180,12 +188,12 @@ public class DefaultSourceMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false); + mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), MapperService.MergeReason.MAPPING_UPDATE, 
false); String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") .startObject("_source").field("enabled", true).endObject() .endObject().endObject().string(); - mapperService.merge("my_type", new CompressedXContent(mapping), true, false); + mapperService.merge("my_type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); DocumentMapper mapper = mapperService.documentMapper("my_type"); assertThat(mapper.type(), equalTo("my_type")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 7bd4d9a78c3..d32dcad5434 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -480,7 +480,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = indexService.mapperService(); - DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), true, false); + DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -494,7 +494,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() .endObject().endObject().endObject().endObject().string(); - defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); + defaultMapper = mapperService.merge("type", new 
CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -509,7 +509,7 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() .endObject().endObject().endObject().endObject().string(); try { - mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); + mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("different [omit_norms]")); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 51ef9ff0024..ed58bb63b65 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -36,17 +36,20 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import 
java.nio.charset.StandardCharsets; import java.util.Arrays; +import java.util.Collection; + import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.elasticsearch.test.VersionUtils.randomVersionBetween; @@ -62,6 +65,11 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TimestampMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testSimpleDisabled() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -115,12 +123,12 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { .startObject("_timestamp").field("enabled", true).endObject() .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true)); assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); @@ -366,9 +374,9 @@ public class 
TimestampMappingTests extends ESSingleNodeTestCase { } void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { - mapperService.merge("type", new CompressedXContent(mapping1), true, false); + mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); try { - mapperService.merge("type", new CompressedXContent(mapping2), false, false); + mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); assertNull(conflict); } catch (IllegalArgumentException e) { assertNotNull(conflict); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java index fa27e9bcfb7..335d3a06e32 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ttl/TTLMappingTests.java @@ -22,10 +22,6 @@ package org.elasticsearch.index.mapper.ttl; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -99,8 +95,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), true, false); - DocumentMapper mapperWithTtl = 
mapperService.merge("type", new CompressedXContent(mappingWithTtl), false, false); + DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false)); assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true)); @@ -122,8 +118,8 @@ public class TTLMappingTests extends ESSingleNodeTestCase { .endObject().endObject().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); - DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), false, false); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); + DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true)); @@ -133,10 +129,10 @@ public class TTLMappingTests extends ESSingleNodeTestCase { String mappingWithTtl = getMappingWithTtlEnabled().string(); String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), true, false); + DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); try { - mapperService.merge("type", new 
CompressedXContent(mappingWithTtlDisabled), false, false); + mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -174,20 +170,20 @@ public class TTLMappingTests extends ESSingleNodeTestCase { public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), randomBoolean(), false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); } public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type"); XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), randomBoolean(), false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); } public void testMergeWithOnlyDefaultSet() throws Exception { XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false); 
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } @@ -198,7 +194,7 @@ public class TTLMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), MapperService.MergeReason.MAPPING_UPDATE, false); CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}"))); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index a53295d7fea..4e65c4a5ab7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -30,9 +30,12 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import 
org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Collection; import java.util.LinkedHashMap; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; @@ -41,6 +44,11 @@ import static org.hamcrest.CoreMatchers.equalTo; public class UpdateMappingTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testAllEnabledAfterDisabled() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", false).endObject().endObject(); XContentBuilder mappingUpdate = XContentFactory.jsonBuilder().startObject().startObject("_all").field("enabled", true).endObject().startObject("properties").startObject("text").field("type", "string").endObject().endObject().endObject(); @@ -76,7 +84,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException { IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping); // simulate like in MetaDataMappingService#putMapping - indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), false, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); // make sure mappings applied CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); assertThat(mappingAfterUpdate.toString(), equalTo(expectedMapping.string())); @@ -99,7 +107,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); // simulate like in 
MetaDataMappingService#putMapping try { - indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), true, false); + indexService.mapperService().merge("type", new CompressedXContent(mappingUpdate.bytes()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -120,14 +128,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { .endObject().endObject().endObject(); try { - mapperService.merge("type", new CompressedXContent(update.string()), false, false); + mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); } try { - mapperService.merge("type", new CompressedXContent(update.string()), false, false); + mapperService.merge("type", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("mapper [foo] of different type, current_type [long], merged_type [double]")); @@ -147,7 +155,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -155,7 +163,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -174,15 
+182,15 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { XContentBuilder mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject(); MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); - mapperService.merge("type1", new CompressedXContent(mapping1.string()), false, false); - mapperService.merge("type2", new CompressedXContent(mapping2.string()), false, false); + mapperService.merge("type1", new CompressedXContent(mapping1.string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge("type2", new CompressedXContent(mapping2.string()), MapperService.MergeReason.MAPPING_UPDATE, false); XContentBuilder update = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties").startObject("foo").field("type", "double").endObject() .endObject().endObject().endObject(); try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -190,7 +198,7 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } try { - mapperService.merge("type2", new CompressedXContent(update.string()), false, false); + mapperService.merge("type2", new CompressedXContent(update.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { // expected @@ -209,14 +217,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test", Settings.settingsBuilder().build()).mapperService(); try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch 
(IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); } try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); @@ -233,14 +241,14 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { MapperService mapperService = createIndex("test", settings).mapperService(); try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); } try { - mapperService.merge("type", new CompressedXContent(mapping.string()), false, false); + mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); fail(); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); diff --git a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java index 5ce841540d1..9e46ce95041 100644 --- a/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/percolator/PercolatorFieldMapperTests.java @@ -46,12 +46,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "string").endObject().endObject() 
.endObject().endObject().string(); - mapperService.merge("type", new CompressedXContent(mapper), true, true); + mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true); String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(PercolatorService.TYPE_NAME) .startObject("properties").startObject("query").field("type", "percolator").endObject().endObject() .endObject().endObject().string(); - mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), true, true); + mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); } public void testPercolatorFieldMapper() throws Exception { @@ -85,7 +85,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject("query").field("type", "percolator").field("index", "no").endObject().endObject() .endObject().endObject().string(); try { - mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), true, true); + mapperService.merge(PercolatorService.TYPE_NAME, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); fail("MapperParsingException expected"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("Mapping definition for [query] has unsupported parameters: [index : no]")); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index b88d0be23b6..8c6bfff3fac 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import 
com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; + import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -46,7 +47,6 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.geo.builders.ShapeBuilderRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; @@ -74,8 +74,6 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.index.query.functionscore.ScoreFunctionParserMapper; import org.elasticsearch.index.query.support.QueryParsers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -93,9 +91,11 @@ import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.cluster.TestClusterService; @@ -186,7 +186,7 @@ public abstract class AbstractQueryTestCase> Version version = randomBoolean() ? 
Version.CURRENT : VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.CURRENT); Settings settings = Settings.settingsBuilder() .put("name", AbstractQueryTestCase.class.toString()) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .build(); Settings indexSettings = Settings.settingsBuilder() @@ -196,28 +196,29 @@ public abstract class AbstractQueryTestCase> final TestClusterService clusterService = new TestClusterService(); clusterService.setState(new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder().put( new IndexMetaData.Builder(index.name()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0)))); + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); final Client proxy = (Client) Proxy.newProxyInstance( Client.class.getClassLoader(), new Class[]{Client.class}, clientInvocationHandler); + namedWriteableRegistry = new NamedWriteableRegistry(); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), - new SettingsModule(settings, new SettingsFilter(settings)), + settingsModule, new ThreadPoolModule(new ThreadPool(settings)), new IndicesModule() { @Override public void configure() { // skip services - bindQueryParsersExtension(); bindMapperExtension(); - bind(ShapeBuilderRegistry.class).asEagerSingleton(); } }, new ScriptModule(settings) { @Override protected void configure() { Settings settings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .build(); @@ -234,20 +235,26 @@ public abstract class AbstractQueryTestCase> } catch(IOException e) { throw new 
IllegalStateException("error while binding ScriptService", e); } - - } }, new IndexSettingsModule(index, indexSettings), + new SearchModule(settings, namedWriteableRegistry) { + @Override + protected void configureSearch() { + // Skip me + } + @Override + protected void configureSuggesters() { + // Skip me + } + }, new AbstractModule() { @Override protected void configure() { bind(Client.class).toInstance(proxy); - Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); - bind(ScoreFunctionParserMapper.class).asEagerSingleton(); bind(ClusterService.class).toProvider(Providers.of(clusterService)); bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); } } ).createInjector(); @@ -284,10 +291,11 @@ public abstract class AbstractQueryTestCase> OBJECT_FIELD_NAME, "type=object", GEO_POINT_FIELD_NAME, "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true", GEO_SHAPE_FIELD_NAME, "type=geo_shape" - ).string()), false, false); + ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner field in the object field mapperService.merge(type, new CompressedXContent("{\"properties\":{\""+OBJECT_FIELD_NAME+"\":{\"type\":\"object\"," - + "\"properties\":{\""+DATE_FIELD_NAME+"\":{\"type\":\"date\"},\""+INT_FIELD_NAME+"\":{\"type\":\"integer\"}}}}}"), false, false); + + "\"properties\":{\""+DATE_FIELD_NAME+"\":{\"type\":\"date\"},\""+INT_FIELD_NAME+"\":{\"type\":\"integer\"}}}}}"), + MapperService.MergeReason.MAPPING_UPDATE, false); currentTypes[i] = type; } namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); @@ -483,7 +491,7 @@ public abstract class AbstractQueryTestCase> QueryParseContext context = createParseContext(); context.reset(parser); context.parseFieldMatcher(matcher); - QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); + 
QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); assertTrue(parser.nextToken() == null); return parseInnerQueryBuilder; } @@ -613,7 +621,7 @@ public abstract class AbstractQueryTestCase> testQuery.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { QueryBuilder prototype = queryParser(testQuery.getName()).getBuilderPrototype(); - QueryBuilder deserializedQuery = prototype.readFrom(in); + QueryBuilder deserializedQuery = prototype.readFrom(in); assertEquals(deserializedQuery, testQuery); assertEquals(deserializedQuery.hashCode(), testQuery.hashCode()); assertNotSame(deserializedQuery, testQuery); @@ -808,12 +816,6 @@ public abstract class AbstractQueryTestCase> } protected static Fuzziness randomFuzziness(String fieldName) { - if (randomBoolean()) { - return Fuzziness.fromEdits(randomIntBetween(0, 2)); - } - if (randomBoolean()) { - return Fuzziness.AUTO; - } switch (fieldName) { case INT_FIELD_NAME: return Fuzziness.build(randomIntBetween(3, 100)); @@ -822,6 +824,9 @@ public abstract class AbstractQueryTestCase> case DATE_FIELD_NAME: return Fuzziness.build(randomTimeValue()); default: + if (randomBoolean()) { + return Fuzziness.fromEdits(randomIntBetween(0, 2)); + } return Fuzziness.AUTO; } } @@ -845,7 +850,7 @@ public abstract class AbstractQueryTestCase> private static final List TIMEZONE_IDS = new ArrayList<>(DateTimeZone.getAvailableIDs()); private static class ClientInvocationHandler implements InvocationHandler { - AbstractQueryTestCase delegate; + AbstractQueryTestCase delegate; @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { if (method.equals(Client.class.getMethod("get", GetRequest.class))) { diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 1c407fbaa0e..30dbcdf4b60 100644 --- 
a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -106,16 +106,16 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase clauseIterator = clauses.iterator(); - for (BooleanClause booleanClause : booleanQuery.getClauses()) { + for (BooleanClause booleanClause : booleanQuery.clauses()) { assertThat(booleanClause, instanceOf(clauseIterator.next().getClass())); } } } } - private static List getBooleanClauses(List queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException { + private static List getBooleanClauses(List> queryBuilders, BooleanClause.Occur occur, QueryShardContext context) throws IOException { List clauses = new ArrayList<>(); - for (QueryBuilder query : queryBuilders) { + for (QueryBuilder query : queryBuilders) { Query innerQuery = query.toQuery(context); if (innerQuery != null) { clauses.add(new BooleanClause(innerQuery, occur)); @@ -132,22 +132,22 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase 0) { - QueryBuilder must = tempQueryBuilder.must().get(0); + QueryBuilder must = tempQueryBuilder.must().get(0); contentString += "must: " + must.toString() + ","; expectedQuery.must(must); } if (tempQueryBuilder.mustNot().size() > 0) { - QueryBuilder mustNot = tempQueryBuilder.mustNot().get(0); + QueryBuilder mustNot = tempQueryBuilder.mustNot().get(0); contentString += (randomBoolean() ? 
"must_not: " : "mustNot: ") + mustNot.toString() + ","; expectedQuery.mustNot(mustNot); } if (tempQueryBuilder.should().size() > 0) { - QueryBuilder should = tempQueryBuilder.should().get(0); + QueryBuilder should = tempQueryBuilder.should().get(0); contentString += "should: " + should.toString() + ","; expectedQuery.should(should); } if (tempQueryBuilder.filter().size() > 0) { - QueryBuilder filter = tempQueryBuilder.filter().get(0); + QueryBuilder filter = tempQueryBuilder.filter().get(0); contentString += "filter: " + filter.toString() + ","; expectedQuery.filter(filter); } @@ -272,6 +272,17 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase getAlternateVersions() { Map alternateVersions = new HashMap<>(); - QueryBuilder innerQuery = createTestQueryBuilder().innerQueries().get(0); + QueryBuilder innerQuery = createTestQueryBuilder().innerQueries().get(0); DisMaxQueryBuilder expectedQuery = new DisMaxQueryBuilder(); expectedQuery.add(innerQuery); String contentString = "{\n" + diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index f2b3a1a5026..15d37150b3f 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.TestSearchContext; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; @@ -57,9 +58,8 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase termLcMatcher = equalTo(originalTermLc); + if ("false".equals(originalTermLc) || "true".equals(originalTermLc)) { + // Booleans become t/f when querying a boolean field + termLcMatcher = 
either(termLcMatcher).or(equalTo(originalTermLc.substring(0, 1))); + } + assertThat(actualTermLc, termLcMatcher); assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength())); assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions())); } + + if (query instanceof NumericRangeQuery) { + // These are fuzzy numeric queries + assertTrue(queryBuilder.fuzziness() != null); + @SuppressWarnings("unchecked") + NumericRangeQuery numericRangeQuery = (NumericRangeQuery) query; + assertTrue(numericRangeQuery.includesMin()); + assertTrue(numericRangeQuery.includesMax()); + + double value; + double width = 0; + try { + value = Double.parseDouble(queryBuilder.value().toString()); + } catch (NumberFormatException e) { + // Maybe its a date + value = ISODateTimeFormat.dateTimeParser().parseMillis(queryBuilder.value().toString()); + width = queryBuilder.fuzziness().asTimeValue().getMillis(); + } + + if (width == 0) { + if (queryBuilder.fuzziness().equals(Fuzziness.AUTO)) { + width = 1; + } else { + width = queryBuilder.fuzziness().asDouble(); + } + } + assertEquals(value - width, numericRangeQuery.getMin().doubleValue(), width * .1); + assertEquals(value + width, numericRangeQuery.getMax().doubleValue(), width * .1); + } } public void testIllegalValues() { diff --git a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java index 36c4f328453..a4af84a8f79 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MultiMatchQueryBuilderTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.NumericRangeQuery; import 
org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -132,7 +133,8 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase { + + protected static final String PARENT_TYPE = "parent"; + protected static final String CHILD_TYPE = "child"; + + @Override + public void setUp() throws Exception { + super.setUp(); + MapperService mapperService = queryShardContext().getMapperService(); + mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object" + ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, + "_parent", "type=" + PARENT_TYPE, + STRING_FIELD_NAME, "type=string", + INT_FIELD_NAME, "type=integer", + DOUBLE_FIELD_NAME, "type=double", + BOOLEAN_FIELD_NAME, "type=boolean", + DATE_FIELD_NAME, "type=date", + OBJECT_FIELD_NAME, "type=object" + ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); + } + + @Override + protected void setSearchContext(String[] types) { + final MapperService mapperService = queryShardContext().getMapperService(); + final IndexFieldDataService fieldData = indexFieldDataService(); + TestSearchContext testSearchContext = new TestSearchContext() { + private InnerHitsContext context; + + + @Override + public void innerHits(InnerHitsContext innerHitsContext) { + context = innerHitsContext; + } + + @Override + public InnerHitsContext innerHits() { + return context; + } + + @Override + public MapperService mapperService() { + return mapperService; // need to build / parse inner hits sort fields + } + + @Override + public IndexFieldDataService fieldData() { + return fieldData; // 
need to build / parse inner hits sort fields + } + }; + testSearchContext.setTypes(types); + SearchContext.setCurrent(testSearchContext); + } + + @Override + protected ParentIdQueryBuilder doCreateTestQueryBuilder() { + return new ParentIdQueryBuilder(CHILD_TYPE, randomAsciiOfLength(4)); + } + + @Override + protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { + assertThat(query, Matchers.instanceOf(DocValuesTermsQuery.class)); + DocValuesTermsQuery termsQuery = (DocValuesTermsQuery) query; + // there are no getters to get the field and terms on DocValuesTermsQuery, so lets validate by creating a + // new query based on the builder: + assertThat(termsQuery, Matchers.equalTo(new DocValuesTermsQuery("_parent#" + PARENT_TYPE, queryBuilder.getId()))); + } + + public void testFromJson() throws IOException { + String query = + "{\n" + + " \"parent_id\" : {\n" + + " \"type\" : \"child\",\n" + + " \"id\" : \"123\",\n" + + " \"boost\" : 3.0,\n" + + " \"_name\" : \"name\"" + + " }\n" + + "}"; + ParentIdQueryBuilder queryBuilder = (ParentIdQueryBuilder) parseQuery(query); + checkGeneratedJson(query, queryBuilder); + assertThat(queryBuilder.getType(), Matchers.equalTo("child")); + assertThat(queryBuilder.getId(), Matchers.equalTo("123")); + assertThat(queryBuilder.boost(), Matchers.equalTo(3f)); + assertThat(queryBuilder.queryName(), Matchers.equalTo("name")); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 491a488d5de..190824a1ced 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -288,7 +288,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase field = fieldsIterator.next(); 
assertTermOrBoostQuery(booleanClause.getQuery(), field.getKey(), queryBuilder.value(), field.getValue()); } - if (queryBuilder.minimumShouldMatch() != null) { + if (queryBuilder.minimumShouldMatch() != null && !boolQuery.isCoordDisabled()) { assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); } } else if (queryBuilder.fields().size() == 1) { diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java index dc8d096f688..3b8ae51559e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanNearQueryBuilderTests.java @@ -50,7 +50,7 @@ public class SpanNearQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); + Iterator> spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanNearQuery.getClauses()) { assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } diff --git a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java index 7a8b66139e3..c3d7be9ae9e 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SpanOrQueryBuilderTests.java @@ -45,7 +45,7 @@ public class SpanOrQueryBuilderTests extends AbstractQueryTestCase spanQueryBuilderIterator = queryBuilder.clauses().iterator(); + Iterator> spanQueryBuilderIterator = queryBuilder.clauses().iterator(); for (SpanQuery spanQuery : spanOrQuery.getClauses()) { assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } @@ -70,32 +70,32 @@ public class SpanOrQueryBuilderTests extends AbstractQueryTestCase)queryBuilder).buildAsBytes(XContentType.values()[i])); } } } @@ 
-414,7 +415,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase)queryBuilder).buildAsBytes(XContentType.values()[i])); } } } diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java index c72470c0cce..98dbf67de57 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java @@ -19,6 +19,8 @@ package org.elasticsearch.index.query.plugin; +import java.io.IOException; + import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -31,10 +33,8 @@ import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.plugins.Plugin; - -import java.io.IOException; +import org.elasticsearch.search.SearchModule; public class DummyQueryParserPlugin extends Plugin { @@ -48,8 +48,8 @@ public class DummyQueryParserPlugin extends Plugin { return "dummy query"; } - public void onModule(IndicesModule module) { - module.registerQueryParser(DummyQueryParser.class); + public void onModule(SearchModule module) { + module.registerQueryParser(DummyQueryParser::new); } public static class DummyQueryBuilder extends AbstractQueryBuilder { diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 236198261d7..46c8d3745ca 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -64,7 +64,7 @@ 
public class MultiMatchQueryTests extends ESSingleNodeTestCase { " }\n" + " }\n" + "}"; - mapperService.merge("person", new CompressedXContent(mapping), true, false); + mapperService.merge("person", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); this.indexService = indexService; } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 446f5cdd7f9..fd32091d891 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -82,17 +82,18 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.search.stats.SearchSlowLog; import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; @@ -101,6 +102,7 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -128,6 +130,11 @@ import static org.hamcrest.Matchers.equalTo; */ public class IndexShardTests 
extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testWriteShardState() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { ShardId id = new ShardId("foo", 1); @@ -400,7 +407,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } private void setDurability(IndexShard shard, Translog.Durability durability) { - client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, durability.name()).build()).get(); + client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), durability.name()).build()).get(); assertEquals(durability, shard.getTranslogDurability()); } @@ -692,13 +699,13 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public void testMaybeFlush() throws Exception { - createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.REQUEST).build()); + createIndex("test", settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.REQUEST).build()); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService("test"); IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(133 /* size of the operation + header&footer*/, 
ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); @@ -714,7 +721,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { shard.getEngine().getTranslog().sync(); long size = shard.getEngine().getTranslog().sizeInBytes(); logger.info("--> current translog size: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(size, ByteSizeUnit.BYTES)) + client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(size, ByteSizeUnit.BYTES)) .build()).get(); client().prepareDelete("test", "test", "2").get(); logger.info("--> translog size after delete: [{}] num_ops [{}] generation [{}]", shard.getEngine().getTranslog().sizeInBytes(), shard.getEngine().getTranslog().totalOperations(), shard.getEngine().getTranslog().getGeneration()); @@ -732,7 +739,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexService test = indicesService.indexService("test"); final IndexShard shard = test.getShardOrNull(0); assertFalse(shard.shouldFlush()); - client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + 
client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefresh(randomBoolean()).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java index c3a2d65748c..911f2598f03 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java @@ -167,8 +167,8 @@ public class NewPathForShardTests extends ESTestCase { path.resolve("b").toString()}; Settings settings = Settings.builder() - .put("path.home", path) - .putArray("path.data", paths).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), path) + .putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build(); NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings)); // Make sure all our mocking above actually worked: diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index 5a82a8942aa..80d5f4c8fe9 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.IndexSettingsModule; @@ -118,7 +119,7 @@ public class ShardPathTests extends ESTestCase { final Path path = createTempDir(); final boolean includeNodeId = randomBoolean(); indexSetttings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build(); - nodeSettings = settingsBuilder().put("path.shared_data", path.toAbsolutePath().toAbsolutePath()) + nodeSettings = settingsBuilder().put(Environment.PATH_SHARED_DATA_SETTING.getKey(), path.toAbsolutePath().toAbsolutePath()) .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH, includeNodeId).build(); if (includeNodeId) { customPath = path.resolve("custom").resolve("0"); diff --git a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index dd5ca6bcc51..926ea49ba4b 100644 --- a/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/core/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.similarity; import org.apache.lucene.search.similarities.ClassicSimilarity; +import org.apache.lucene.search.similarities.DFISimilarity; import org.apache.lucene.search.similarities.AfterEffectL; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.BasicModelG; @@ -38,16 +39,26 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; +import java.util.Collection; 
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; public class SimilarityTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + public void testResolveDefaultSimilarities() { SimilarityService similarityService = createIndex("foo").similarityService(); assertThat(similarityService.getSimilarity("classic").get(), instanceOf(ClassicSimilarity.class)); @@ -84,7 +95,7 @@ public class SimilarityTests extends ESSingleNodeTestCase { Settings indexSettings = Settings.settingsBuilder() .put("index.similarity.my_similarity.type", "BM25") .put("index.similarity.my_similarity.k1", 2.0f) - .put("index.similarity.my_similarity.b", 1.5f) + .put("index.similarity.my_similarity.b", 0.5f) .put("index.similarity.my_similarity.discount_overlaps", false) .build(); IndexService indexService = createIndex("foo", indexSettings); @@ -93,7 +104,7 @@ public class SimilarityTests extends ESSingleNodeTestCase { BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getK1(), equalTo(2.0f)); - assertThat(similarity.getB(), equalTo(1.5f)); + assertThat(similarity.getB(), equalTo(0.5f)); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } @@ -147,6 +158,23 @@ public class SimilarityTests extends ESSingleNodeTestCase { assertThat(((NormalizationH2) similarity.getNormalization()).getC(), equalTo(3f)); } + public void testResolveSimilaritiesFromMapping_DFI() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1").field("type", "string").field("similarity", "my_similarity").endObject() + .endObject() + .endObject().endObject().string(); + + Settings indexSettings = Settings.settingsBuilder() + .put("index.similarity.my_similarity.type", "DFI") + .build(); + 
IndexService indexService = createIndex("foo", indexSettings); + DocumentMapper documentMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); + MappedFieldType fieldType = documentMapper.mappers().getMapper("field1").fieldType(); + assertThat(fieldType.similarity(), instanceOf(DFISimilarityProvider.class)); + assertThat(fieldType.similarity().get(), instanceOf(DFISimilarity.class)); + } + public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index f7fbe3dc8a8..2c9de235b7d 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -62,6 +62,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockIndexEventListener; import org.elasticsearch.test.store.MockFSIndexStore; @@ -107,6 +108,7 @@ import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class CorruptedFileIT extends ESIntegTestCase { + @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() @@ -121,7 +123,8 @@ public class CorruptedFileIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(MockTransportService.TestPlugin.class, MockIndexEventListener.TestPlugin.class); + return pluginList(MockTransportService.TestPlugin.class, 
MockIndexEventListener.TestPlugin.class, MockFSIndexStore.TestPlugin.class, + InternalSettingsPlugin.class); // uses index.version.created } /** @@ -141,9 +144,8 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); disableAllocation("test"); @@ -246,9 +248,8 @@ public class CorruptedFileIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); IndexRequestBuilder[] builders = new 
IndexRequestBuilder[numDocs]; @@ -329,7 +330,7 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put("index.routing.allocation.include._name", primariesNode.getNode().name()) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) )); ensureGreen(); // allocated with empty commit @@ -391,9 +392,9 @@ public class CorruptedFileIT extends ESIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(1, 4)) // don't go crazy here it must recovery fast // This does corrupt files on the replica, so we can't check: - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) .put("index.routing.allocation.include._name", primariesNode.getNode().name()) - .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE) + .put(EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), EnableAllocationDecider.Rebalance.NONE) )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; @@ -472,9 +473,8 @@ public class CorruptedFileIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") // no replicas for this test .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + 
.put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; @@ -524,12 +524,11 @@ public class CorruptedFileIT extends ESIntegTestCase { internalCluster().ensureAtLeastNumDataNodes(2); assertAcked(prepareCreate("test").setSettings(Settings.builder() - .put(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS, "one") + .put(PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS_SETTING.getKey(), "one") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, cluster().numDataNodes() - 1) .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose - .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files - .put("indices.recovery.concurrent_streams", 10) + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) // no checkindex - we corrupt shards on purpose + .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)) // no translog based flush - it might change the .liv / segments.N files )); ensureGreen(); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 3ff7aa97d6a..500cf07692e 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -167,13 +167,13 @@ public class CorruptedTranslogIT 
extends ESIntegTestCase { /** Disables translog flushing for the specified index */ private static void disableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).build(); client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); } /** Enables translog flushing for the specified index */ private static void enableTranslogFlush(String index) { - Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(512, ByteSizeUnit.MB)).build(); + Settings settings = Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(512, ByteSizeUnit.MB)).build(); client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get(); } } diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index f27d9ddcd2b..214c86a498a 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.store.NIOFSDirectory; import org.apache.lucene.store.NoLockFactory; import org.apache.lucene.store.SimpleFSDirectory; +import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -49,7 +50,7 @@ public class IndexStoreTests extends ESTestCase { final Path tempDir = createTempDir().resolve("foo").resolve("0"); final IndexModule.Type[] values = IndexModule.Type.values(); final IndexModule.Type 
type = RandomPicks.randomFrom(random(), values); - Settings settings = Settings.settingsBuilder().put(IndexModule.STORE_TYPE, type.name().toLowerCase(Locale.ROOT)) + Settings settings = Settings.settingsBuilder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), type.name().toLowerCase(Locale.ROOT)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo"), settings); FsDirectoryService service = new FsDirectoryService(indexSettings, null, new ShardPath(false, tempDir, tempDir, "foo", new ShardId("foo", 0))); @@ -95,4 +96,24 @@ public class IndexStoreTests extends ESTestCase { } } } + + public void testUpdateThrottleType() throws IOException { + Settings settings = Settings.settingsBuilder().put(IndexStoreConfig.INDICES_STORE_THROTTLE_TYPE_SETTING.getKey(), "all") + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(new Index("foo"), settings); + IndexStoreConfig indexStoreConfig = new IndexStoreConfig(settings); + IndexStore store = new IndexStore(indexSettings, indexStoreConfig); + assertEquals(StoreRateLimiting.Type.NONE, store.rateLimiting().getType()); + assertEquals(StoreRateLimiting.Type.ALL, indexStoreConfig.getNodeRateLimiter().getType()); + assertNotSame(indexStoreConfig.getNodeRateLimiter(), store.rateLimiting()); + + store.setType(IndexStore.IndexRateLimitingType.fromString("NODE")); + assertEquals(StoreRateLimiting.Type.ALL, store.rateLimiting().getType()); + assertSame(indexStoreConfig.getNodeRateLimiter(), store.rateLimiting()); + + store.setType(IndexStore.IndexRateLimitingType.fromString("merge")); + assertEquals(StoreRateLimiting.Type.MERGE, store.rateLimiting().getType()); + assertNotSame(indexStoreConfig.getNodeRateLimiter(), store.rateLimiting()); + assertEquals(StoreRateLimiting.Type.ALL, indexStoreConfig.getNodeRateLimiter().getType()); + } } diff --git 
a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index acaa1cf8b5d..ed98fc1bacd 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -1154,7 +1154,7 @@ public class StoreTests extends ESTestCase { DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, TimeValue.timeValueMinutes(0)).build(); + .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(0)).build(); Store store = new Store(shardId, IndexSettingsModule.newIndexSettings(new Index("index"), settings), directoryService, new DummyShardLock(shardId)); long initialStoreSize = 0; for (String extraFiles : store.directory().listAll()) { diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index da5e5c8aa08..d0ef2331d9d 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -138,8 +138,8 @@ public class TranslogTests extends ESTestCase { private TranslogConfig getTranslogConfig(Path path) { Settings build = Settings.settingsBuilder() - .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) - .build(); + .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) + .build(); ByteSizeValue bufferSize = randomBoolean() ? 
TranslogConfig.DEFAULT_BUFFER_SIZE : new ByteSizeValue(10 + randomInt(128 * 1024), ByteSizeUnit.BYTES); return new TranslogConfig(shardId, path, IndexSettingsModule.newIndexSettings(shardId.index(), build), BigArrays.NON_RECYCLING_INSTANCE, bufferSize); } @@ -234,19 +234,16 @@ public class TranslogTests extends ESTestCase { ArrayList ops = new ArrayList<>(); Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - snapshot.close(); addToTranslogAndList(translog, ops, new Translog.Index("test", "1", new byte[]{1})); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(ops.size())); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(ops.size())); addToTranslogAndList(translog, ops, new Translog.Delete(newUid("2"))); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(ops.size())); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(ops.size())); snapshot = translog.newSnapshot(); @@ -260,22 +257,18 @@ public class TranslogTests extends ESTestCase { assertThat(snapshot.next(), equalTo(null)); - snapshot.close(); - long firstId = translog.currentFileGeneration(); translog.prepareCommit(); assertThat(translog.currentFileGeneration(), Matchers.not(equalTo(firstId))); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(ops.size())); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(ops.size())); translog.commit(); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(0)); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(0)); } protected TranslogStats stats() throws IOException { @@ -337,9 
+330,9 @@ public class TranslogTests extends ESTestCase { assertEquals(6, copy.estimatedNumberOfOperations()); assertEquals(431, copy.getTranslogSizeInBytes()); assertEquals("\"translog\"{\n" + - " \"operations\" : 6,\n" + - " \"size_in_bytes\" : 431\n" + - "}", copy.toString().trim()); + " \"operations\" : 6,\n" + + " \"size_in_bytes\" : 431\n" + + "}", copy.toString().trim()); try { new TranslogStats(1, -1); @@ -359,51 +352,43 @@ public class TranslogTests extends ESTestCase { ArrayList ops = new ArrayList<>(); Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - snapshot.close(); addToTranslogAndList(translog, ops, new Translog.Index("test", "1", new byte[]{1})); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(1)); - snapshot.close(); + assertThat(snapshot.totalOperations(), equalTo(1)); snapshot = translog.newSnapshot(); - assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot.estimatedTotalOperations(), equalTo(1)); - - // snapshot while another is open Translog.Snapshot snapshot1 = translog.newSnapshot(); - assertThat(snapshot1, SnapshotMatchers.size(1)); - assertThat(snapshot1.estimatedTotalOperations(), equalTo(1)); + assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); + assertThat(snapshot.totalOperations(), equalTo(1)); - snapshot.close(); - snapshot1.close(); + assertThat(snapshot1, SnapshotMatchers.size(1)); + assertThat(snapshot1.totalOperations(), equalTo(1)); } public void testSnapshotWithNewTranslog() throws IOException { ArrayList ops = new ArrayList<>(); Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); - snapshot.close(); addToTranslogAndList(translog, ops, new Translog.Index("test", "1", new byte[]{1})); Translog.Snapshot snapshot1 = translog.newSnapshot(); addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 
new byte[]{2})); + assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); + translog.prepareCommit(); addToTranslogAndList(translog, ops, new Translog.Index("test", "3", new byte[]{3})); - Translog.Snapshot snapshot2 = translog.newSnapshot(); - translog.commit(); - assertThat(snapshot2, SnapshotMatchers.equalsTo(ops)); - assertThat(snapshot2.estimatedTotalOperations(), equalTo(ops.size())); - - - assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); - snapshot1.close(); - snapshot2.close(); + try (Translog.View view = translog.newView()) { + Translog.Snapshot snapshot2 = translog.newSnapshot(); + translog.commit(); + assertThat(snapshot2, SnapshotMatchers.equalsTo(ops)); + assertThat(snapshot2.totalOperations(), equalTo(ops.size())); + } } public void testSnapshotOnClosedTranslog() throws IOException { @@ -418,39 +403,6 @@ public class TranslogTests extends ESTestCase { } } - public void testDeleteOnSnapshotRelease() throws Exception { - ArrayList firstOps = new ArrayList<>(); - addToTranslogAndList(translog, firstOps, new Translog.Index("test", "1", new byte[]{1})); - - Translog.Snapshot firstSnapshot = translog.newSnapshot(); - assertThat(firstSnapshot.estimatedTotalOperations(), equalTo(1)); - translog.commit(); - assertFileIsPresent(translog, 1); - - - ArrayList secOps = new ArrayList<>(); - addToTranslogAndList(translog, secOps, new Translog.Index("test", "2", new byte[]{2})); - assertThat(firstSnapshot.estimatedTotalOperations(), equalTo(1)); - - Translog.Snapshot secondSnapshot = translog.newSnapshot(); - translog.add(new Translog.Index("test", "3", new byte[]{3})); - assertThat(secondSnapshot, SnapshotMatchers.equalsTo(secOps)); - assertThat(secondSnapshot.estimatedTotalOperations(), equalTo(1)); - assertFileIsPresent(translog, 1); - assertFileIsPresent(translog, 2); - - firstSnapshot.close(); - assertFileDeleted(translog, 1); - assertFileIsPresent(translog, 2); - secondSnapshot.close(); - assertFileIsPresent(translog, 2); // it's the current 
nothing should be deleted - translog.commit(); - assertFileIsPresent(translog, 3); // it's the current nothing should be deleted - assertFileDeleted(translog, 2); - - } - - public void assertFileIsPresent(Translog translog, long id) { if (Files.exists(translogDir.resolve(Translog.getFilename(id)))) { return; @@ -624,14 +576,8 @@ public class TranslogTests extends ESTestCase { Translog.Snapshot snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(1)); assertFileIsPresent(translog, 1); - assertThat(snapshot.estimatedTotalOperations(), equalTo(1)); - if (randomBoolean()) { - translog.close(); - snapshot.close(); - } else { - snapshot.close(); - translog.close(); - } + assertThat(snapshot.totalOperations(), equalTo(1)); + translog.close(); assertFileIsPresent(translog, 1); } @@ -708,16 +654,21 @@ public class TranslogTests extends ESTestCase { public void onFailure(Throwable t) { logger.error("--> reader [{}] had an error", t, threadId); errors.add(t); - closeView(); + try { + closeView(); + } catch (IOException e) { + logger.error("unexpected error while closing view, after failure"); + t.addSuppressed(e); + } } - void closeView() { + void closeView() throws IOException { if (view != null) { view.close(); } } - void newView() { + void newView() throws IOException { closeView(); view = translog.newView(); // captures the currently written ops so we know what to expect from the view @@ -738,17 +689,16 @@ public class TranslogTests extends ESTestCase { // these are what we expect the snapshot to return (and potentially some more). 
Set expectedOps = new HashSet<>(writtenOps.keySet()); expectedOps.removeAll(writtenOpsAtView); - try (Translog.Snapshot snapshot = view.snapshot()) { - Translog.Operation op; - while ((op = snapshot.next()) != null) { - expectedOps.remove(op); - } + Translog.Snapshot snapshot = view.snapshot(); + Translog.Operation op; + while ((op = snapshot.next()) != null) { + expectedOps.remove(op); } if (expectedOps.isEmpty() == false) { StringBuilder missed = new StringBuilder("missed ").append(expectedOps.size()).append(" operations"); boolean failed = false; - for (Translog.Operation op : expectedOps) { - final Translog.Location loc = writtenOps.get(op); + for (Translog.Operation expectedOp : expectedOps) { + final Translog.Location loc = writtenOps.get(expectedOp); if (loc.generation < view.minTranslogGeneration()) { // writtenOps is only updated after the op was written to the translog. This mean // that ops written to the translog before the view was taken (and will be missing from the view) @@ -756,7 +706,7 @@ public class TranslogTests extends ESTestCase { continue; } failed = true; - missed.append("\n --> [").append(op).append("] written at ").append(loc); + missed.append("\n --> [").append(expectedOp).append("] written at ").append(loc); } if (failed) { fail(missed.toString()); @@ -803,7 +753,6 @@ public class TranslogTests extends ESTestCase { } } - public void testSyncUpTo() throws IOException { int translogOperations = randomIntBetween(10, 100); int count = 0; @@ -875,7 +824,7 @@ public class TranslogTests extends ESTestCase { final Translog.Location lastLocation = translog.add(new Translog.Index("test", "" + translogOperations, Integer.toString(translogOperations).getBytes(Charset.forName("UTF-8")))); final Checkpoint checkpoint = Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME)); - try (final ImmutableTranslogReader reader = translog.openReader(translog.location().resolve(Translog.getFilename(translog.currentFileGeneration())), 
checkpoint)) { + try (final TranslogReader reader = translog.openReader(translog.location().resolve(Translog.getFilename(translog.currentFileGeneration())), checkpoint)) { assertEquals(lastSynced + 1, reader.totalOperations()); for (int op = 0; op < translogOperations; op++) { Translog.Location location = locations.get(op); @@ -913,7 +862,7 @@ public class TranslogTests extends ESTestCase { } writer.sync(); - final TranslogReader reader = randomBoolean() ? writer : translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME))); + final BaseTranslogReader reader = randomBoolean() ? writer : translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME))); for (int i = 0; i < numOps; i++) { ByteBuffer buffer = ByteBuffer.allocate(4); reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * i); @@ -926,7 +875,7 @@ public class TranslogTests extends ESTestCase { out.writeInt(2048); writer.add(new BytesArray(bytes)); - if (reader instanceof ImmutableTranslogReader) { + if (reader instanceof TranslogReader) { ByteBuffer buffer = ByteBuffer.allocate(4); try { reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * numOps); @@ -934,6 +883,7 @@ public class TranslogTests extends ESTestCase { } catch (EOFException ex) { // expected } + ((TranslogReader) reader).close(); } else { // live reader! 
ByteBuffer buffer = ByteBuffer.allocate(4); @@ -943,7 +893,43 @@ public class TranslogTests extends ESTestCase { final int value = buffer.getInt(); assertEquals(2048, value); } - IOUtils.close(writer, reader); + IOUtils.close(writer); + } + + public void testFailWriterWhileClosing() throws IOException { + Path tempDir = createTempDir(); + final FailSwitch fail = new FailSwitch(); + fail.failNever(); + TranslogConfig config = getTranslogConfig(tempDir); + try (Translog translog = getFailableTranslog(fail, config)) { + final TranslogWriter writer = translog.createWriter(0); + final int numOps = randomIntBetween(10, 100); + byte[] bytes = new byte[4]; + ByteArrayDataOutput out = new ByteArrayDataOutput(bytes); + for (int i = 0; i < numOps; i++) { + out.reset(bytes); + out.writeInt(i); + writer.add(new BytesArray(bytes)); + } + writer.sync(); + try { + fail.failAlways(); + writer.closeIntoReader(); + fail(); + } catch (MockDirectoryWrapper.FakeIOException ex) { + } + try (TranslogReader reader = translog.openReader(writer.path(), Checkpoint.read(translog.location().resolve(Translog.CHECKPOINT_FILE_NAME)))) { + for (int i = 0; i < numOps; i++) { + ByteBuffer buffer = ByteBuffer.allocate(4); + reader.readBytes(buffer, reader.getFirstOperationOffset() + 4 * i); + buffer.flip(); + final int value = buffer.getInt(); + assertEquals(i, value); + } + } + + } + } public void testBasicRecovery() throws IOException { @@ -971,19 +957,17 @@ public class TranslogTests extends ESTestCase { assertEquals(0, translog.stats().estimatedNumberOfOperations()); assertEquals(1, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - assertNull(snapshot.next()); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + assertNull(snapshot.next()); } else { assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, translog.currentFileGeneration()); 
assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - for (int i = minUncommittedOp; i < translogOperations; i++) { - assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + for (int i = minUncommittedOp; i < translogOperations; i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1014,13 +998,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? 
translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } if (randomBoolean()) { // recover twice @@ -1028,13 +1011,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 3 less than current - we never finished the commit and run recovery twice", translogGeneration.translogFileGeneration + 3, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1071,14 +1053,14 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? 
translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } + } if (randomBoolean()) { // recover twice @@ -1086,13 +1068,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 3 less than current - we never finished the commit and run recovery twice", translogGeneration.translogFileGeneration + 3, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? 
translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1132,13 +1113,12 @@ public class TranslogTests extends ESTestCase { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - int upTo = sync ? translogOperations : prepareOp; - for (int i = 0; i < upTo; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + int upTo = sync ? 
translogOperations : prepareOp; + for (int i = 0; i < upTo; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null synced: " + sync, next); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1209,14 +1189,13 @@ public class TranslogTests extends ESTestCase { } config.setTranslogGeneration(translogGeneration); this.translog = new Translog(config); - try (Translog.Snapshot snapshot = this.translog.newSnapshot()) { - for (int i = firstUncommitted; i < translogOperations; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("" + i, next); - assertEquals(Integer.parseInt(next.getSource().source.toUtf8()), i); - } - assertNull(snapshot.next()); + Translog.Snapshot snapshot = this.translog.newSnapshot(); + for (int i = firstUncommitted; i < translogOperations; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("" + i, next); + assertEquals(Integer.parseInt(next.getSource().source.toUtf8()), i); } + assertNull(snapshot.next()); } public void testFailOnClosedWrite() throws IOException { @@ -1287,12 +1266,12 @@ public class TranslogTests extends ESTestCase { case CREATE: case INDEX: op = new Translog.Index("test", threadId + "_" + opCount, - randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); + randomUnicodeOfLengthBetween(1, 20 * 1024).getBytes("UTF-8")); break; case DELETE: op = new Translog.Delete(new Term("_uid", threadId + "_" + opCount), - 1 + randomInt(100000), - randomFrom(VersionType.values())); + 1 + randomInt(100000), + randomFrom(VersionType.values())); break; default: throw new ElasticsearchException("not supported op type"); @@ -1383,14 +1362,13 @@ public class TranslogTests extends ESTestCase { assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, tlog.currentFileGeneration()); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = 
tlog.newSnapshot()) { - assertEquals(opsSynced, snapshot.estimatedTotalOperations()); - for (int i = 0; i < opsSynced; i++) { - assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, locations.get(i).generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + assertEquals(opsSynced, snapshot.totalOperations()); + for (int i = 0; i < opsSynced; i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, locations.get(i).generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1401,13 +1379,12 @@ public class TranslogTests extends ESTestCase { LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer boarders regularly for (int opsAdded = 0; opsAdded < numOps; opsAdded++) { locations.add(translog.add(new Translog.Index("test", "" + opsAdded, lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))))); - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - assertEquals(opsAdded + 1, snapshot.estimatedTotalOperations()); - for (int i = 0; i < opsAdded; i++) { - assertEquals("expected operation" + i + " to be in the current translog but wasn't", translog.currentFileGeneration(), locations.get(i).generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - } + Translog.Snapshot snapshot = this.translog.newSnapshot(); + assertEquals(opsAdded + 1, snapshot.totalOperations()); + for (int i = 0; i < opsAdded; i++) { + assertEquals("expected operation" + i + " to be in the current translog but 
wasn't", translog.currentFileGeneration(), locations.get(i).generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); } } } @@ -1511,20 +1488,20 @@ public class TranslogTests extends ESTestCase { } config.setTranslogGeneration(translog.getGeneration()); try (Translog tlog = new Translog(config)) { - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - if (writtenOperations.size() != snapshot.estimatedTotalOperations()) { - for (int i = 0; i < threadCount; i++) { - if (threadExceptions[i] != null) - threadExceptions[i].printStackTrace(); + Translog.Snapshot snapshot = tlog.newSnapshot(); + if (writtenOperations.size() != snapshot.totalOperations()) { + for (int i = 0; i < threadCount; i++) { + if (threadExceptions[i] != null) { + threadExceptions[i].printStackTrace(); } } - assertEquals(writtenOperations.size(), snapshot.estimatedTotalOperations()); - for (int i = 0; i < writtenOperations.size(); i++) { - assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, writtenOperations.get(i).location.generation); - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals(next, writtenOperations.get(i).operation); - } + } + assertEquals(writtenOperations.size(), snapshot.totalOperations()); + for (int i = 0; i < writtenOperations.size(); i++) { + assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, writtenOperations.get(i).location.generation); + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals(next, writtenOperations.get(i).operation); } } } @@ -1537,6 +1514,7 @@ public class TranslogTests extends ESTestCase { private static class FailSwitch { private volatile int failRate; private volatile boolean onceFailedFailAlways = false; + public boolean 
fail() { boolean fail = randomIntBetween(1, 100) <= failRate; if (fail && onceFailedFailAlways) { @@ -1716,24 +1694,22 @@ public class TranslogTests extends ESTestCase { try (Translog tlog = new Translog(config)) { assertNotNull(translogGeneration); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - for (int i = 0; i < 1; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + for (int i = 0; i < 1; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } try (Translog tlog = new Translog(config)) { assertNotNull(translogGeneration); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - for (int i = 0; i < 2; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + for (int i = 0; i < 2; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); } } } @@ -1749,7 +1725,7 @@ public class TranslogTests extends ESTestCase { Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog")); config.setTranslogGeneration(translogGeneration); - try { + try { Translog tlog = new Translog(config); fail("file already exists?"); } catch (TranslogException ex) { @@ 
-1758,6 +1734,7 @@ public class TranslogTests extends ESTestCase { assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class); } } + public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { translog.add(new Translog.Index("test", "" + 0, Integer.toString(0).getBytes(Charset.forName("UTF-8")))); Translog.TranslogGeneration translogGeneration = translog.getGeneration(); @@ -1774,17 +1751,16 @@ public class TranslogTests extends ESTestCase { try (Translog tlog = new Translog(config)) { assertNotNull(translogGeneration); assertFalse(tlog.syncNeeded()); - try (Translog.Snapshot snapshot = tlog.newSnapshot()) { - for (int i = 0; i < 1; i++) { - Translog.Operation next = snapshot.next(); - assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); - } + Translog.Snapshot snapshot = tlog.newSnapshot(); + for (int i = 0; i < 1; i++) { + Translog.Operation next = snapshot.next(); + assertNotNull("operation " + i + " must be non-null", next); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } - try { + try { Translog tlog = new Translog(config); fail("file already exists?"); } catch (TranslogException ex) { @@ -1863,13 +1839,12 @@ public class TranslogTests extends ESTestCase { } try (Translog translog = new Translog(config)) { - try (Translog.Snapshot snapshot = translog.newSnapshot()) { - assertEquals(syncedDocs.size(), snapshot.estimatedTotalOperations()); - for (int i = 0; i < syncedDocs.size(); i++) { - Translog.Operation next = snapshot.next(); - assertEquals(syncedDocs.get(i), next.getSource().source.toUtf8()); - assertNotNull("operation " + i + " must be non-null", next); - } + Translog.Snapshot snapshot = translog.newSnapshot(); + assertEquals(syncedDocs.size(), snapshot.totalOperations()); + 
for (int i = 0; i < syncedDocs.size(); i++) { + Translog.Operation next = snapshot.next(); + assertEquals(syncedDocs.get(i), next.getSource().source.toUtf8()); + assertNotNull("operation " + i + " must be non-null", next); } } } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java index 68f26c504fb..8ae7117d483 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.translog; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.index.VersionType; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -29,66 +28,32 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; /** * Tests for reading old and new translog files */ public class TranslogVersionTests extends ESTestCase { - public void testV0LegacyTranslogVersion() throws Exception { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v0.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - assertThat("a version0 stream is returned", reader instanceof LegacyTranslogReader, equalTo(true)); - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - final Translog.Operation operation = snapshot.next(); - assertThat("operation is the correct type correctly", operation.opType() == Translog.Operation.Type.INDEX, equalTo(true)); - Translog.Index op = (Translog.Index) operation; - assertThat(op.id(), equalTo("1")); - assertThat(op.type(), equalTo("doc")); - assertThat(op.source().toUtf8(), equalTo("{\"body\": \"worda wordb 
wordc wordd \\\"worde\\\" wordf\"}")); - assertThat(op.routing(), equalTo(null)); - assertThat(op.parent(), equalTo(null)); - assertThat(op.version(), equalTo(1L)); - assertThat(op.timestamp(), equalTo(1407312091791L)); - assertThat(op.ttl(), equalTo(-1L)); - assertThat(op.versionType(), equalTo(VersionType.INTERNAL)); - assertNull(snapshot.next()); - } + private void checkFailsToOpen(String file, String expectedMessage) throws IOException { + Path translogFile = getDataPath(file); + assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); + try { + openReader(translogFile, 0); + fail("should be able to open an old translog"); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), containsString(expectedMessage)); } + + } + + public void testV0LegacyTranslogVersion() throws Exception { + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v0.binary", "pre-1.4 translog"); } public void testV1ChecksummedTranslogVersion() throws Exception { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v1.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - - assertThat("a version1 stream is returned", reader instanceof ImmutableTranslogReader, equalTo(true)); - - Translog.Operation operation = snapshot.next(); - - assertThat("operation is the correct type correctly", operation.opType() == Translog.Operation.Type.INDEX, equalTo(true)); - Translog.Index op = (Translog.Index) operation; - assertThat(op.id(), equalTo("Bwiq98KFSb6YjJQGeSpeiw")); - assertThat(op.type(), equalTo("doc")); - assertThat(op.source().toUtf8(), equalTo("{\"body\": \"foo\"}")); - assertThat(op.routing(), equalTo(null)); - assertThat(op.parent(), equalTo(null)); - assertThat(op.version(), equalTo(1L)); - assertThat(op.timestamp(), equalTo(1408627184844L)); - 
assertThat(op.ttl(), equalTo(-1L)); - assertThat(op.versionType(), equalTo(VersionType.INTERNAL)); - - // There are more operations - int opNum = 1; - while (snapshot.next() != null) { - opNum++; - } - assertThat("there should be 5 translog operations", opNum, equalTo(5)); - } - } + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v1.binary", "pre-2.0 translog"); } public void testCorruptedTranslogs() throws Exception { @@ -112,47 +77,17 @@ public class TranslogVersionTests extends ESTestCase { e.getMessage().contains("Invalid first byte in translog file, got: 1, expected 0x00 or 0x3f"), equalTo(true)); } - try { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v1-corrupted-body.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - while(snapshot.next() != null) { - - } - } - } - fail("should have thrown an exception about the body being corrupted"); - } catch (TranslogCorruptedException e) { - assertThat("translog corruption from body: " + e.getMessage(), - e.getMessage().contains("translog corruption while reading from stream"), equalTo(true)); - } - + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v1-corrupted-body.binary", "pre-2.0 translog"); } public void testTruncatedTranslog() throws Exception { - try { - Path translogFile = getDataPath("/org/elasticsearch/index/translog/translog-v1-truncated.binary"); - assertThat("test file should exist", Files.exists(translogFile), equalTo(true)); - try (ImmutableTranslogReader reader = openReader(translogFile, 0)) { - try (final Translog.Snapshot snapshot = reader.newSnapshot()) { - while(snapshot.next() != null) { - - } - } - } - fail("should have thrown an exception about the body being truncated"); - } catch (TranslogCorruptedException e) { - assertThat("translog truncated: " + 
e.getMessage(), - e.getMessage().contains("operation size is corrupted must be"), equalTo(true)); - } + checkFailsToOpen("/org/elasticsearch/index/translog/translog-v1-truncated.binary", "pre-2.0 translog"); } - public ImmutableTranslogReader openReader(Path path, long id) throws IOException { + public TranslogReader openReader(Path path, long id) throws IOException { FileChannel channel = FileChannel.open(path, StandardOpenOption.READ); try { - final ChannelReference raf = new ChannelReference(path, id, channel, null); - ImmutableTranslogReader reader = ImmutableTranslogReader.open(raf, new Checkpoint(Files.size(path), TranslogReader.UNKNOWN_OP_COUNT, id), null); + TranslogReader reader = TranslogReader.open(channel, path, new Checkpoint(Files.size(path), 1, id), null); channel = null; return reader; } finally { diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 8abe19ffbb6..4853d59588b 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -63,7 +63,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put(SETTING_NUMBER_OF_SHARDS, 11) .put(SETTING_NUMBER_OF_REPLICAS, 1) - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0s") + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0s") .build(); // start one server diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java index 4f6aaf25705..ed8c27892b9 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerIT.java @@ -84,7 +84,7 @@ public class 
IndicesLifecycleListenerIT extends ESIntegTestCase { @Override public void beforeIndexAddedToCluster(Index index, Settings indexSettings) { beforeAddedCount.incrementAndGet(); - if (indexSettings.getAsBoolean("index.fail", false)) { + if (MockIndexEventListener.TestPlugin.INDEX_FAIL.get(indexSettings)) { throw new ElasticsearchException("failing on purpose"); } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java deleted file mode 100644 index ec182a69890..00000000000 --- a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.indices; - -import org.elasticsearch.common.inject.ModuleTestCase; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.index.query.TermQueryParser; - -import java.io.IOException; - -public class IndicesModuleTests extends ModuleTestCase { - - static class FakeQueryParser implements QueryParser { - @Override - public String[] names() { - return new String[] {"fake-query-parser"}; - } - - @Override - public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { - return null; - } - - @Override - public QueryBuilder getBuilderPrototype() { - return null; - } - } - - public void testRegisterQueryParser() { - IndicesModule module = new IndicesModule(); - module.registerQueryParser(FakeQueryParser.class); - assertSetMultiBinding(module, QueryParser.class, FakeQueryParser.class); - } - - public void testRegisterQueryParserDuplicate() { - IndicesModule module = new IndicesModule(); - try { - module.registerQueryParser(TermQueryParser.class); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Can't register the same [query_parser] more than once for [" + TermQueryParser.class.getName() + "]"); - } - } - - public void testRegisterHunspellDictionaryDuplicate() { - IndicesModule module = new IndicesModule(); - try { - module.registerQueryParser(TermQueryParser.class); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Can't register the same [query_parser] more than once for [" + TermQueryParser.class.getName() + "]"); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index aeb4ac55410..6d0a8955116 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ 
b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -46,11 +46,17 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.suggest.SuggestRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.suggest.SuggestBuilders; import org.elasticsearch.test.ESIntegTestCase; +import java.util.Collection; +import java.util.function.Function; + import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -60,6 +66,12 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class IndicesOptionsIntegrationIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(TestPlugin.class); // + } + public void testSpecifiedIndexUnavailableMultipleIndices() throws Exception { createIndex("test1"); ensureYellow(); @@ -619,6 +631,29 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { assertThat(client().admin().indices().prepareGetMappings("barbaz").get().mappings().get("barbaz").get("type4"), notNullValue()); } + public static final class TestPlugin extends Plugin { + @Override + public String name() { + return "index-a-setting"; + } + + @Override + public String description() { + return "a plugin that adds a dynamic tst setting"; + } + + private static final Setting INDEX_A = new Setting<>("index.a", "", Function.identity(), true, Setting.Scope.INDEX); + private static final Setting INDEX_C = new 
Setting<>("index.c", "", Function.identity(), true, Setting.Scope.INDEX); + private static final Setting INDEX_E = new Setting<>("index.e", "", Function.identity(), false, Setting.Scope.INDEX); + + + public void onModule(SettingsModule module) { + module.registerSetting(INDEX_A); + module.registerSetting(INDEX_C); + module.registerSetting(INDEX_E); + } + } + public void testUpdateSettings() throws Exception { verify(client().admin().indices().prepareUpdateSettings("foo").setSettings(Settings.builder().put("a", "b")), true); verify(client().admin().indices().prepareUpdateSettings("_all").setSettings(Settings.builder().put("a", "b")), true); diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index 4130cf5ad8a..cd4ca35bbc5 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESBackcompatTestCase; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.ArrayList; @@ -46,7 +47,7 @@ import static org.hamcrest.Matchers.notNullValue; public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(DummyAnalysisPlugin.class); + return pluginList(DummyAnalysisPlugin.class, InternalSettingsPlugin.class); } public void testThatPreBuiltAnalyzersAreNotClosedOnIndexClose() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java index 722a4ebde8a..c539e2a68a0 100644 
--- a/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceIT.java @@ -22,6 +22,7 @@ import org.apache.lucene.analysis.hunspell.Dictionary; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.indices.analysis.HunspellService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -39,9 +40,9 @@ import static org.hamcrest.Matchers.notNullValue; public class HunspellServiceIT extends ESIntegTestCase { public void testLocaleDirectoryWithNodeLevelConfig() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) - .put(HUNSPELL_IGNORE_CASE, true) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(HUNSPELL_IGNORE_CASE.getKey(), true) .build(); internalCluster().startNode(settings); @@ -52,9 +53,9 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testLocaleDirectoryWithLocaleSpecificConfig() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) - .put(HUNSPELL_IGNORE_CASE, true) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/conf_dir")) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) + .put(HUNSPELL_IGNORE_CASE.getKey(), true) .put("indices.analysis.hunspell.dictionary.en_US.strict_affix_parsing", false) .put("indices.analysis.hunspell.dictionary.en_US.ignore_case", false) .build(); @@ -74,8 +75,8 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testDicWithNoAff() 
throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/no_aff_conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/no_aff_conf_dir")) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) .build(); Dictionary dictionary = null; @@ -92,8 +93,8 @@ public class HunspellServiceIT extends ESIntegTestCase { public void testDicWithTwoAffs() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.conf", getDataPath("/indices/analyze/two_aff_conf_dir")) - .put(HUNSPELL_LAZY_LOAD, randomBoolean()) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/indices/analyze/two_aff_conf_dir")) + .put(HUNSPELL_LAZY_LOAD.getKey(), randomBoolean()) .build(); Dictionary dictionary = null; diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java index ffb86c77c72..46402c6054a 100644 --- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java +++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java @@ -40,7 +40,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { public void testCacheAggs() throws Exception { assertAcked(client().admin().indices().prepareCreate("index") .addMapping("type", "f", "type=date") - .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, true).get()); + .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get()); indexRandom(true, client().prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"), client().prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z")); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index 
370770481cb..c8a80f6621f 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -153,7 +153,7 @@ public class FlushIT extends ESIntegTestCase { createIndex("test"); client().admin().indices().prepareUpdateSettings("test").setSettings( - Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1)) + Settings.builder().put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)).put("index.refresh_interval", -1).put("index.number_of_replicas", internalCluster().numDataNodes() - 1)) .get(); ensureGreen(); final AtomicBoolean stop = new AtomicBoolean(false); diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 1af04e295dd..6cdd4cf348a 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -64,11 +64,11 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { logger.info("--> resetting breaker settings"); Settings resetSettings = settingsBuilder() .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), - HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null)) .put(HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), - HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefault(null)) + 
HierarchyCircuitBreakerService.FIELDDATA_CIRCUIT_BREAKER_OVERHEAD_SETTING.getDefaultRaw(null)) .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), - HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefault(null)) + HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getDefaultRaw(null)) .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 1.0) .build(); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(resetSettings)); diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index f4f7fe0a1df..4807cf61dea 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -30,7 +30,9 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.Requests; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MockEngineFactoryPlugin; @@ -58,7 +60,7 @@ import static org.hamcrest.Matchers.equalTo; public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class); + return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class); } public void testBreakerWithRandomExceptions() 
throws IOException, InterruptedException, ExecutionException { @@ -195,6 +197,8 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { // TODO: Generalize this class and add it as a utility public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { + public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); public static class TestPlugin extends Plugin { @Override public String name() { @@ -205,6 +209,11 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { return "a mock reader wrapper that throws random exceptions for testing"; } + public void onModule(SettingsModule module) { + module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING); + module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING); + } + public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) { module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class); } @@ -218,9 +227,9 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { private final double lowLevelRatio; ThrowingSubReaderWrapper(Settings settings) { - final long seed = settings.getAsLong(SETTING_INDEX_SEED, 0l); - this.topLevelRatio = settings.getAsDouble(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d); - this.lowLevelRatio = settings.getAsDouble(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d); + final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); + this.topLevelRatio = EXCEPTION_TOP_LEVEL_RATIO_SETTING.get(settings); + this.lowLevelRatio = EXCEPTION_LOW_LEVEL_RATIO_SETTING.get(settings); this.random = new Random(seed); } diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java 
b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index dc61d4bc5fe..dacf23758e8 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -52,6 +52,7 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.store.MockFSDirectoryService; +import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.Transport; @@ -97,7 +98,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(MockTransportService.TestPlugin.class); + return pluginList(MockTransportService.TestPlugin.class, MockFSIndexStore.TestPlugin.class); } private void assertRecoveryStateWithoutStage(RecoveryState state, int shardId, Type type, @@ -497,7 +498,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> creating test index: {}", name); assertAcked(prepareCreate(name, nodeCount, settingsBuilder().put("number_of_shards", shardCount) - .put("number_of_replicas", replicaCount).put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0))); + .put("number_of_replicas", replicaCount).put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0))); ensureGreen(); logger.info("--> indexing sample data"); @@ -530,7 +531,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { final Settings nodeSettings = Settings.builder() .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms") .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s") - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) // restarted recoveries will delete 
temp files and write them again + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) // restarted recoveries will delete temp files and write them again .build(); // start a master node internalCluster().startNode(nodeSettings); @@ -547,7 +548,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { client().admin().indices().prepareCreate(indexName) .setSettings( Settings.builder() - .put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "color", "blue") + .put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "color", "blue") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) ).get(); @@ -593,7 +594,7 @@ public class IndexRecoveryIT extends ESIntegTestCase { logger.info("--> starting recovery from blue to red"); client().admin().indices().prepareUpdateSettings(indexName).setSettings( Settings.builder() - .put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + "color", "red,blue") + .put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "color", "red,blue") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) ).get(); diff --git a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java index 4886ee0886b..de5bbfbe837 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/GetSettingsBlocksIT.java @@ -41,7 +41,7 @@ public class GetSettingsBlocksIT extends ESIntegTestCase { .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", -1) .put("index.merge.policy.expunge_deletes_allowed", "30") - .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING, false))); + .put(MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey(), false))); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { @@ -50,7 +50,7 @@ public class 
GetSettingsBlocksIT extends ESIntegTestCase { assertThat(response.getIndexToSettings().size(), greaterThanOrEqualTo(1)); assertThat(response.getSetting("test", "index.refresh_interval"), equalTo("-1")); assertThat(response.getSetting("test", "index.merge.policy.expunge_deletes_allowed"), equalTo("30")); - assertThat(response.getSetting("test", MapperService.INDEX_MAPPER_DYNAMIC_SETTING), equalTo("false")); + assertThat(response.getSetting("test", MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey()), equalTo("false")); } finally { disableIndexBlock("test", block); } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java index 53a93fd0bd1..dc48b96c23f 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -263,16 +263,16 @@ public class UpdateNumberOfReplicasIT extends ESIntegTestCase { public void testUpdateWithInvalidNumberOfReplicas() { createIndex("test"); + final int value = randomIntBetween(-10, -1); try { client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(-10, -1)) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, value) ) .execute().actionGet(); fail("should have thrown an exception about the replica shard count"); } catch (IllegalArgumentException e) { - assertThat("message contains error about shard count: " + e.getMessage(), - e.getMessage().contains("the value of the setting index.number_of_replicas must be a non negative integer"), equalTo(true)); + assertEquals("Failed to parse value [" + value + "] for setting [index.number_of_replicas] must be >= 0", e.getMessage()); } } } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java 
b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 52b4c653746..67fc5acd09a 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -30,11 +30,13 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -50,13 +52,47 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class UpdateSettingsIT extends ESIntegTestCase { + + public void testResetDefault() { + createIndex("test"); + + client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.settingsBuilder() + .put("index.refresh_interval", -1) + .put("index.translog.flush_threshold_size", "1024b") + ) + .execute().actionGet(); + IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertEquals(indexMetaData.getSettings().get("index.refresh_interval"), "-1"); + for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = service.indexService("test"); + if (indexService != null) { + assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), -1); + assertEquals(indexService.getIndexSettings().getFlushThresholdSize().bytes(), 1024); + } + } + 
client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.settingsBuilder() + .putNull("index.refresh_interval") + ) + .execute().actionGet(); + indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertNull(indexMetaData.getSettings().get("index.refresh_interval")); + for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = service.indexService("test"); + if (indexService != null) { + assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), 1000); + assertEquals(indexService.getIndexSettings().getFlushThresholdSize().bytes(), 1024); + } + } + } public void testOpenCloseUpdateSettings() throws Exception { createIndex("test"); try { client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", -1) // this one can change - .put("index.cache.filter.type", "none") // this one can't + .put("index.fielddata.cache", "none") // this one can't ) .execute().actionGet(); fail(); @@ -66,12 +102,12 @@ public class UpdateSettingsIT extends ESIntegTestCase { IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), nullValue()); - assertThat(indexMetaData.getSettings().get("index.cache.filter.type"), nullValue()); + assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), nullValue()); // Now verify via dedicated get settings api: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), nullValue()); - assertThat(getSettingsResponse.getSetting("test", "index.cache.filter.type"), nullValue()); + assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), 
nullValue()); client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() @@ -107,18 +143,18 @@ public class UpdateSettingsIT extends ESIntegTestCase { client().admin().indices().prepareUpdateSettings("test") .setSettings(Settings.settingsBuilder() .put("index.refresh_interval", "1s") // this one can change - .put("index.cache.filter.type", "none") // this one can't + .put("index.fielddata.cache", "none") // this one can't ) .execute().actionGet(); indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); assertThat(indexMetaData.getSettings().get("index.refresh_interval"), equalTo("1s")); - assertThat(indexMetaData.getSettings().get("index.cache.filter.type"), equalTo("none")); + assertThat(indexMetaData.getSettings().get("index.fielddata.cache"), equalTo("none")); // Now verify via dedicated get settings api: getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); assertThat(getSettingsResponse.getSetting("test", "index.refresh_interval"), equalTo("1s")); - assertThat(getSettingsResponse.getSetting("test", "index.cache.filter.type"), equalTo("none")); + assertThat(getSettingsResponse.getSetting("test", "index.fielddata.cache"), equalTo("none")); } public void testEngineGCDeletesSetting() throws InterruptedException { @@ -142,14 +178,14 @@ public class UpdateSettingsIT extends ESIntegTestCase { // No throttling at first, only 1 non-replicated shard, force lots of merging: assertAcked(prepareCreate("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "none") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - 
.put(MergeSchedulerConfig.MAX_MERGE_COUNT, "2") - .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL, 0) // get stats all the time - no caching + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "2") + .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), 0) // get stats all the time - no caching )); ensureGreen(); long termUpto = 0; @@ -180,13 +216,13 @@ public class UpdateSettingsIT extends ESIntegTestCase { .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "merge") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, "1mb")) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), "1mb")) .get(); // Make sure setting says it is in fact changed: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getSetting("test", IndexStore.INDEX_STORE_THROTTLE_TYPE), equalTo("merge")); + assertThat(getSettingsResponse.getSetting("test", IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey()), equalTo("merge")); // Also make sure we see throttling kicking in: boolean done = false; @@ -220,7 +256,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "none")) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "none")) .get(); // Optimize does a waitForMerges, which we must do to make sure all in-flight (throttled) merges finish: @@ -310,11 +346,11 @@ public class UpdateSettingsIT extends ESIntegTestCase { .setSettings(Settings.builder() 
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "2") - .put(MergeSchedulerConfig.AUTO_THROTTLE, "true") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "true") )); // Disable auto throttle: @@ -323,7 +359,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(MergeSchedulerConfig.AUTO_THROTTLE, "no")) + .put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), "false")) .get(); // Make sure we log the change: @@ -331,7 +367,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { // Make sure setting says it is in fact changed: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.AUTO_THROTTLE), equalTo("no")); + assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey()), equalTo("false")); } finally { rootLogger.removeAppender(mockAppender); rootLogger.setLevel(savedLevel); @@ -352,21 +388,20 @@ public class UpdateSettingsIT extends ESIntegTestCase { .setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - 
.put(MergeSchedulerConfig.MAX_THREAD_COUNT, "10000") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "10000") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "10000") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10000") )); assertFalse(mockAppender.sawUpdateMaxThreadCount); - // Now make a live change to reduce allowed merge threads: client() .admin() .indices() .prepareUpdateSettings("test") .setSettings(Settings.builder() - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") ) .get(); @@ -375,7 +410,7 @@ public class UpdateSettingsIT extends ESIntegTestCase { // Make sure setting says it is in fact changed: GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.MAX_THREAD_COUNT), equalTo("1")); + assertThat(getSettingsResponse.getSetting("test", MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey()), equalTo("1")); } finally { rootLogger.removeAppender(mockAppender); diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java index 4bf752886c9..65a4d5ab76d 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java @@ -171,7 +171,7 @@ public class RareClusterStateIT extends ESIntegTestCase { @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/14932") public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startNodesAsync(2, Settings.builder() - .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen") + 
.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .build()).get(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); prepareCreate("test").setSettings(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, true).addMapping("type").get(); diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index 9522b79ea11..6eebc9e1f0c 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -139,7 +139,8 @@ public class SimpleIndexStateIT extends ESIntegTestCase { try { client().admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("number_of_shards", "bad")).get(); fail(); - } catch (SettingsException ex) { + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [bad] for setting [index.number_of_shards]", ex.getMessage()); // Expected } diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index c6dbff842ef..756a9af43b1 100644 --- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -78,9 +78,9 @@ public class IndexStatsIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { //Filter/Query cache is cleaned periodically, default is 60s, so make sure it runs often. 
Thread.sleep for 60s is bad return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL, "1ms") - .put(IndexModule.QUERY_CACHE_EVERYTHING, true) - .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE) + .put(IndicesRequestCache.INDICES_CACHE_REQUEST_CLEAN_INTERVAL.getKey(), "1ms") + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) .build(); } @@ -185,7 +185,7 @@ public class IndexStatsIT extends ESIntegTestCase { } public void testQueryCache() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, true).get()); + assertAcked(client().admin().indices().prepareCreate("idx").setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get()); ensureGreen(); // index docs until we have at least one doc on each shard, otherwise, our tests will not work @@ -265,7 +265,7 @@ public class IndexStatsIT extends ESIntegTestCase { // set the index level setting to false, and see that the reverse works client().admin().indices().prepareClearCache().setRequestCache(true).get(); // clean the cache - assertAcked(client().admin().indices().prepareUpdateSettings("idx").setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, false))); + assertAcked(client().admin().indices().prepareUpdateSettings("idx").setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false))); assertThat(client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits(), equalTo((long) numDocs)); assertThat(client().admin().indices().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), equalTo(0l)); @@ -277,13 +277,13 @@ public class 
IndexStatsIT extends ESIntegTestCase { public void testNonThrottleStats() throws Exception { assertAcked(prepareCreate("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "merge") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "10000") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + .put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "10000") )); ensureGreen(); long termUpto = 0; @@ -309,15 +309,14 @@ public class IndexStatsIT extends ESIntegTestCase { public void testThrottleStats() throws Exception { assertAcked(prepareCreate("test") .setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "merge") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "merge") .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1") .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE, "2") - .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER, "2") - .put(MergeSchedulerConfig.MAX_THREAD_COUNT, "1") - .put(MergeSchedulerConfig.MAX_MERGE_COUNT, "1") - .put("index.merge.policy.type", "tiered") - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC.name()) + .put(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGE_AT_ONCE_SETTING.getKey(), "2") + .put(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING.getKey(), "2") + .put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), "1") + 
.put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), "1") + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC.name()) )); ensureGreen(); long termUpto = 0; diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 9c9cb01e597..48f01ab0852 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -82,11 +83,11 @@ import static org.hamcrest.Matchers.equalTo; public class IndicesStoreIntegrationIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { // simplify this and only use a single data path - return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("path.data", "") + return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put(Environment.PATH_DATA_SETTING.getKey(), "") // by default this value is 1 sec in tests (30 sec in practice) but we adding disruption here // which is between 1 and 2 sec can cause each of the shard deletion requests to timeout. // to prevent this we are setting the timeout here to something highish ie. 
the default in practice - .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS)) + .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(30, TimeUnit.SECONDS)) .build(); } @@ -306,7 +307,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_name", node4) + .put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_name", node4) )); assertFalse(client().admin().cluster().prepareHealth().setWaitForRelocatingShards(0).setWaitForGreenStatus().setWaitForNodes("5").get().isTimedOut()); @@ -328,7 +329,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { logger.debug("--> allowing index to be assigned to node [{}]", node4); assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings( Settings.builder() - .put(FilterAllocationDecider.INDEX_ROUTING_EXCLUDE_GROUP + "_name", "NONE"))); + .put(IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "_name", "NONE"))); assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"))); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index b32cfef76b6..5aaed6b5d94 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -321,21 +321,23 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get(); 
assertThat(response.getIndexTemplates(), empty()); - client().admin().indices().preparePutTemplate("template_1") + try { + client().admin().indices().preparePutTemplate("template_1") .setTemplate("te*") .setSettings(Settings.builder().put("does_not_exist", "test")) .get(); + fail(); + } catch (IllegalArgumentException ex) { + assertEquals("unknown setting [index.does_not_exist]", ex.getMessage()); + } response = client().admin().indices().prepareGetTemplates().get(); - assertThat(response.getIndexTemplates(), hasSize(1)); - assertThat(response.getIndexTemplates().get(0).getSettings().getAsMap().size(), equalTo(1)); - assertThat(response.getIndexTemplates().get(0).getSettings().get("index.does_not_exist"), equalTo("test")); + assertEquals(0, response.getIndexTemplates().size()); createIndex("test"); - //the wrong setting has no effect but does get stored among the index settings GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings("test").get(); - assertThat(getSettingsResponse.getIndexToSettings().get("test").getAsMap().get("index.does_not_exist"), equalTo("test")); + assertNull(getSettingsResponse.getIndexToSettings().get("test").getAsMap().get("index.does_not_exist")); } public void testIndexTemplateWithAliases() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java index 4dda068ddd6..442b1afa6ee 100644 --- a/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java +++ b/core/src/test/java/org/elasticsearch/node/internal/InternalSettingsPreparerTests.java @@ -48,7 +48,7 @@ public class InternalSettingsPreparerTests extends ESTestCase { @Before public void createBaseEnvSettings() { baseEnvSettings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); } @@ -68,7 +68,7 @@ public 
class InternalSettingsPreparerTests extends ESTestCase { assertNotNull(settings.get("name")); // a name was set assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size()); - String home = baseEnvSettings.get("path.home"); + String home = Environment.PATH_HOME_SETTING.get(baseEnvSettings); String configDir = env.configFile().toString(); assertTrue(configDir, configDir.startsWith(home)); } diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java index a0751dffac5..2a121be509c 100644 --- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java +++ b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.options.detailederrors; import org.apache.http.impl.client.HttpClients; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty.NettyHttpServerTransport; @@ -43,8 +44,8 @@ public class DetailedErrorsDisabledIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true) - .put(NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED, false) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) + .put(NettyHttpServerTransport.SETTING_HTTP_DETAILED_ERRORS_ENABLED.getKey(), false) .build(); } diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java index 935b4e21ad2..4333d81b2ea 100644 --- 
a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java +++ b/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.options.detailederrors; import org.apache.http.impl.client.HttpClients; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.node.Node; @@ -42,7 +43,7 @@ public class DetailedErrorsEnabledIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put(Node.HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } diff --git a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 811f010d099..abd158788f0 100644 --- a/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -33,12 +33,14 @@ import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import static org.elasticsearch.action.percolate.PercolateSourceBuilder.docBuilder; +import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.yamlBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; +import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.percolator.PercolatorTestUtil.convertFromTextArray; 
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertMatchCount; @@ -363,6 +365,33 @@ public class MultiPercolatorIT extends ESIntegTestCase { assertEquals(response.getItems()[1].getResponse().getMatches()[0].getId().string(), "Q"); } + public void testStartTimeIsPropagatedToShardRequests() throws Exception { + // See: https://github.com/elastic/elasticsearch/issues/15908 + internalCluster().ensureAtLeastNumDataNodes(2); + client().admin().indices().prepareCreate("test") + .setSettings(settingsBuilder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 1) + ) + .addMapping("type", "date_field", "type=date,format=strict_date_optional_time||epoch_millis") + .get(); + ensureGreen(); + + client().prepareIndex("test", ".percolator", "1") + .setSource(jsonBuilder().startObject().field("query", rangeQuery("date_field").lt("now+90d")).endObject()) + .setRefresh(true) + .get(); + + for (int i = 0; i < 32; i++) { + MultiPercolateResponse response = client().prepareMultiPercolate() + .add(client().preparePercolate().setDocumentType("type").setIndices("test") + .setPercolateDoc(new PercolateSourceBuilder.DocBuilder().setDoc("date_field", "2015-07-21T10:28:01-07:00"))) + .get(); + assertThat(response.getItems()[0].getResponse().getCount(), equalTo(1L)); + assertThat(response.getItems()[0].getResponse().getMatches()[0].getId().string(), equalTo("1")); + } + } + void initNestedIndexAndPercolation() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder(); mapping.startObject().startObject("properties").startObject("companyname").field("type", "string").endObject() diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java index a60bc27c1be..a5d7ab5ba90 100644 --- 
a/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolateDocumentParserTests.java @@ -57,8 +57,9 @@ import org.junit.Before; import org.mockito.Mockito; import java.util.Collections; -import java.util.Set; +import java.util.Map; +import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -66,27 +67,24 @@ import static org.hamcrest.Matchers.nullValue; public class PercolateDocumentParserTests extends ESTestCase { - private Index index; private MapperService mapperService; private PercolateDocumentParser parser; private QueryShardContext queryShardContext; + private PercolateShardRequest request; @Before public void init() { - index = new Index("_index"); IndexSettings indexSettings = new IndexSettings(new IndexMetaData.Builder("_index").settings( Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .build(), - Settings.EMPTY, Collections.emptyList() - ); + .build(), Settings.EMPTY); AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); IndicesModule indicesModule = new IndicesModule(); mapperService = new MapperService(indexSettings, analysisService, new SimilarityService(indexSettings, Collections.emptyMap()), indicesModule.getMapperRegistry(), () -> null); - Set parsers = Collections.singleton(new TermQueryParser()); - IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry(indexSettings.getSettings(), parsers, new NamedWriteableRegistry()); + Map> parsers = singletonMap("term", new TermQueryParser()); + IndicesQueriesRegistry indicesQueriesRegistry = new 
IndicesQueriesRegistry(indexSettings.getSettings(), parsers); queryShardContext = new QueryShardContext(indexSettings, null, null, null, mapperService, null, null, indicesQueriesRegistry); @@ -99,6 +97,10 @@ public class PercolateDocumentParserTests extends ESTestCase { parser = new PercolateDocumentParser( highlightPhase, new SortParseElement(), aggregationPhase, mappingUpdatedAction ); + + request = Mockito.mock(PercolateShardRequest.class); + Mockito.when(request.shardId()).thenReturn(new ShardId(new Index("_index"), 0)); + Mockito.when(request.documentType()).thenReturn("type"); } public void testParseDoc() throws Exception { @@ -107,9 +109,7 @@ public class PercolateDocumentParserTests extends ESTestCase { .field("field1", "value1") .endObject() .endObject(); - PercolateShardRequest request = new PercolateShardRequest(new ShardId(index, 0), null); - request.documentType("type"); - request.source(source.bytes()); + Mockito.when(request.source()).thenReturn(source.bytes()); PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext); @@ -128,9 +128,7 @@ public class PercolateDocumentParserTests extends ESTestCase { .field("size", 123) .startObject("sort").startObject("_score").endObject().endObject() .endObject(); - PercolateShardRequest request = new PercolateShardRequest(new ShardId(index, 0), null); - request.documentType("type"); - request.source(source.bytes()); + Mockito.when(request.source()).thenReturn(source.bytes()); PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext); @@ -153,10 +151,8 @@ public class PercolateDocumentParserTests extends ESTestCase { XContentBuilder docSource = jsonBuilder().startObject() .field("field1", "value1") 
.endObject(); - PercolateShardRequest request = new PercolateShardRequest(new ShardId(index, 0), null); - request.documentType("type"); - request.source(source.bytes()); - request.docSource(docSource.bytes()); + Mockito.when(request.source()).thenReturn(source.bytes()); + Mockito.when(request.docSource()).thenReturn(docSource.bytes()); PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); ParsedDocument parsedDocument = parser.parse(request, context, mapperService, queryShardContext); @@ -182,10 +178,8 @@ public class PercolateDocumentParserTests extends ESTestCase { XContentBuilder docSource = jsonBuilder().startObject() .field("field1", "value1") .endObject(); - PercolateShardRequest request = new PercolateShardRequest(new ShardId(index, 0), null); - request.documentType("type"); - request.source(source.bytes()); - request.docSource(docSource.bytes()); + Mockito.when(request.source()).thenReturn(source.bytes()); + Mockito.when(request.docSource()).thenReturn(docSource.bytes()); PercolateContext context = new PercolateContext(request, new SearchShardTarget("_node", "_index", 0), mapperService); try { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index c32632bbf1d..b8ed2cc0e28 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -202,7 +202,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { } public void testSingleShardAggregations() throws Exception { - assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("SETTING_NUMBER_OF_SHARDS", 1)) + assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)) .addMapping("type", "field1", "type=string", 
"field2", "type=string")); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java index cb8ffb8e91f..9378eaa7542 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.env.Environment; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -113,7 +114,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase { } Settings.Builder nodeSettings = Settings.builder() - .put("path.data", dataDir); + .put(Environment.PATH_DATA_SETTING.getKey(), dataDir); internalCluster().startNode(nodeSettings.build()); ensureGreen(INDEX_NAME); } diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index 0c16c981847..4a15b65382c 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -163,12 +163,6 @@ public class PercolatorIT extends ESIntegTestCase { assertThat(response.getMatches(), arrayWithSize(1)); assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4")); - logger.info("--> Search dummy doc, percolate queries must not be included"); - SearchResponse searchResponse = client().prepareSearch("test", "test").execute().actionGet(); - assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).type(), 
equalTo("type")); - assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1")); - logger.info("--> Percolate non existing doc"); try { client().preparePercolate() @@ -657,14 +651,6 @@ public class PercolatorIT extends ESIntegTestCase { assertMatchCount(response, 1l); assertThat(response.getMatches(), arrayWithSize(1)); assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4")); - - logger.info("--> Search normals docs, percolate queries must not be included"); - SearchResponse searchResponse = client().prepareSearch("test").execute().actionGet(); - assertThat(searchResponse.getHits().totalHits(), equalTo(4L)); - assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type")); - assertThat(searchResponse.getHits().getAt(1).type(), equalTo("type")); - assertThat(searchResponse.getHits().getAt(2).type(), equalTo("type")); - assertThat(searchResponse.getHits().getAt(3).type(), equalTo("type")); } public void testPercolatingExistingDocs_routing() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java index 49635abc8de..d9c784da2b1 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorServiceTests.java @@ -163,9 +163,7 @@ public class PercolatorServiceTests extends ESTestCase { Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .build(), - Settings.EMPTY, Collections.emptyList() - ); + .build(), Settings.EMPTY); return new PercolatorQueriesRegistry( new ShardId(index, 0), indexSettings, diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java index deaff46f27b..37a0f4e358e 100644 --- 
a/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginInfoTests.java @@ -40,17 +40,13 @@ public class PluginInfoTests extends ESTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); PluginInfo info = PluginInfo.readFromProperties(pluginDir); assertEquals("my_plugin", info.getName()); assertEquals("fake desc", info.getDescription()); assertEquals("1.0", info.getVersion()); assertEquals("FakePlugin", info.getClassname()); - assertTrue(info.isJvm()); assertTrue(info.isIsolated()); - assertFalse(info.isSite()); - assertNull(info.getUrl()); } public void testReadFromPropertiesNameMissing() throws Exception { @@ -94,27 +90,12 @@ public class PluginInfoTests extends ESTestCase { } } - public void testReadFromPropertiesJvmAndSiteMissing() throws Exception { - Path pluginDir = createTempDir().resolve("fake-plugin"); - PluginTestUtil.writeProperties(pluginDir, - "description", "fake desc", - "version", "1.0", - "name", "my_plugin"); - try { - PluginInfo.readFromProperties(pluginDir); - fail("expected jvm or site exception"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("must be at least a jvm or site plugin")); - } - } - public void testReadFromPropertiesElasticsearchVersionMissing() throws Exception { Path pluginDir = createTempDir().resolve("fake-plugin"); PluginTestUtil.writeProperties(pluginDir, "description", "fake desc", "name", "my_plugin", - "version", "1.0", - "jvm", "true"); + "version", "1.0"); try { PluginInfo.readFromProperties(pluginDir); fail("expected missing elasticsearch version exception"); @@ -129,8 +110,7 @@ public class PluginInfoTests extends ESTestCase { "description", "fake desc", "name", "my_plugin", "elasticsearch.version", Version.CURRENT.toString(), - "version", "1.0", - "jvm", "true"); + "version", "1.0"); 
try { PluginInfo.readFromProperties(pluginDir); fail("expected missing java version exception"); @@ -148,8 +128,7 @@ public class PluginInfoTests extends ESTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", "1000000.0", "classname", "FakePlugin", - "version", "1.0", - "jvm", "true"); + "version", "1.0"); try { PluginInfo.readFromProperties(pluginDir); fail("expected incompatible java version exception"); @@ -167,8 +146,7 @@ public class PluginInfoTests extends ESTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", "1.7.0_80", "classname", "FakePlugin", - "version", "1.0", - "jvm", "true"); + "version", "1.0"); try { PluginInfo.readFromProperties(pluginDir); fail("expected bad java version format exception"); @@ -182,7 +160,6 @@ public class PluginInfoTests extends ESTestCase { PluginTestUtil.writeProperties(pluginDir, "description", "fake desc", "version", "1.0", - "jvm", "true", "name", "my_plugin", "elasticsearch.version", "bogus"); try { @@ -199,7 +176,6 @@ public class PluginInfoTests extends ESTestCase { "description", "fake desc", "name", "my_plugin", "version", "1.0", - "jvm", "true", "elasticsearch.version", Version.V_1_7_0.toString()); try { PluginInfo.readFromProperties(pluginDir); @@ -216,8 +192,7 @@ public class PluginInfoTests extends ESTestCase { "name", "my_plugin", "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), - "java.version", System.getProperty("java.specification.version"), - "jvm", "true"); + "java.version", System.getProperty("java.specification.version")); try { PluginInfo.readFromProperties(pluginDir); fail("expected old elasticsearch version exception"); @@ -226,42 +201,13 @@ public class PluginInfoTests extends ESTestCase { } } - public void testReadFromPropertiesSitePlugin() throws Exception { - Path pluginDir = createTempDir().resolve("fake-plugin"); - Files.createDirectories(pluginDir.resolve("_site")); - PluginTestUtil.writeProperties(pluginDir, - 
"description", "fake desc", - "name", "my_plugin", - "version", "1.0", - "site", "true"); - PluginInfo info = PluginInfo.readFromProperties(pluginDir); - assertTrue(info.isSite()); - assertFalse(info.isJvm()); - assertEquals("NA", info.getClassname()); - } - - public void testReadFromPropertiesSitePluginWithoutSite() throws Exception { - Path pluginDir = createTempDir().resolve("fake-plugin"); - PluginTestUtil.writeProperties(pluginDir, - "description", "fake desc", - "name", "my_plugin", - "version", "1.0", - "site", "true"); - try { - PluginInfo.readFromProperties(pluginDir); - fail("didn't get expected exception"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("site plugin but has no '_site")); - } - } - public void testPluginListSorted() { PluginsAndModules pluginsInfo = new PluginsAndModules(); - pluginsInfo.addPlugin(new PluginInfo("c", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("b", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("e", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("a", "foo", true, "dummy", true, "dummyclass", true)); - pluginsInfo.addPlugin(new PluginInfo("d", "foo", true, "dummy", true, "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("c", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("b", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("e", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("a", "foo", "dummy", "dummyclass", true)); + pluginsInfo.addPlugin(new PluginInfo("d", "foo", "dummy", "dummyclass", true)); final List infos = pluginsInfo.getPluginInfos(); List names = infos.stream().map((input) -> input.getName()).collect(Collectors.toList()); diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java 
b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 660f1015c3d..d3c1f1b8bad 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -42,7 +42,7 @@ public class PluginsServiceTests extends ESTestCase { } @Override public Settings additionalSettings() { - return Settings.builder().put("foo.bar", "1").put(IndexModule.STORE_TYPE, IndexModule.Type.MMAPFS.getSettingsKey()).build(); + return Settings.builder().put("foo.bar", "1").put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.MMAPFS.getSettingsKey()).build(); } } public static class AdditionalSettingsPlugin2 extends Plugin { @@ -88,19 +88,19 @@ public class PluginsServiceTests extends ESTestCase { public void testAdditionalSettings() { Settings settings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("my.setting", "test") - .put(IndexModule.STORE_TYPE, IndexModule.Type.SIMPLEFS.getSettingsKey()).build(); + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.SIMPLEFS.getSettingsKey()).build(); PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class); Settings newSettings = service.updatedSettings(); assertEquals("test", newSettings.get("my.setting")); // previous settings still exist assertEquals("1", newSettings.get("foo.bar")); // added setting exists - assertEquals(IndexModule.Type.SIMPLEFS.getSettingsKey(), newSettings.get(IndexModule.STORE_TYPE)); // does not override pre existing settings + assertEquals(IndexModule.Type.SIMPLEFS.getSettingsKey(), newSettings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey())); // does not override pre existing settings } public void testAdditionalSettingsClash() { Settings settings = Settings.builder() - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir()).build(); PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class, AdditionalSettingsPlugin2.class); try { service.updatedSettings(); @@ -115,7 +115,7 @@ public class PluginsServiceTests extends ESTestCase { public void testOnModuleExceptionsArePropagated() { Settings settings = Settings.builder() - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); PluginsService service = newPluginsService(settings, FailOnModule.class); try { service.processModule(new BrokenModule()); diff --git a/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java b/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java deleted file mode 100644 index e2df2518f1c..00000000000 --- a/core/src/test/java/org/elasticsearch/plugins/SitePluginIT.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.plugins; - -import org.apache.http.client.config.RequestConfig; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; -import org.elasticsearch.test.rest.client.http.HttpResponse; - -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.rest.RestStatus.FORBIDDEN; -import static org.elasticsearch.rest.RestStatus.MOVED_PERMANENTLY; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; -import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; -import static org.hamcrest.Matchers.containsString; - -/** - * We want to test site plugins - */ -@ClusterScope(scope = Scope.SUITE, numDataNodes = 1) -public class SitePluginIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - Path pluginDir = getDataPath("/org/elasticsearch/test_plugins"); - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put("path.plugins", pluginDir.toAbsolutePath()) - .put("force.http.enabled", true) - .build(); - } - - @Override - public HttpRequestBuilder httpClient() { - RequestConfig.Builder builder = RequestConfig.custom().setRedirectsEnabled(false); - CloseableHttpClient httpClient = HttpClients.custom().setDefaultRequestConfig(builder.build()).build(); - return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)); - } - - 
public void testRedirectSitePlugin() throws Exception { - // We use an HTTP Client to test redirection - HttpResponse response = httpClient().method("GET").path("/_plugin/dummy").execute(); - assertThat(response, hasStatus(MOVED_PERMANENTLY)); - assertThat(response.getBody(), containsString("/_plugin/dummy/")); - - // We test the real URL - response = httpClient().method("GET").path("/_plugin/dummy/").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin")); - } - - /** - * Test direct access to an existing file (index.html) - */ - public void testAnyPage() throws Exception { - HttpResponse response = httpClient().path("/_plugin/dummy/index.html").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin")); - } - - /** - * Test normalizing of path - */ - public void testThatPathsAreNormalized() throws Exception { - // more info: https://www.owasp.org/index.php/Path_Traversal - List notFoundUris = new ArrayList<>(); - notFoundUris.add("/_plugin/dummy/../../../../../log4j.properties"); - notFoundUris.add("/_plugin/dummy/../../../../../%00log4j.properties"); - notFoundUris.add("/_plugin/dummy/..%c0%af..%c0%af..%c0%af..%c0%af..%c0%aflog4j.properties"); - notFoundUris.add("/_plugin/dummy/%2E%2E/%2E%2E/%2E%2E/%2E%2E/index.html"); - notFoundUris.add("/_plugin/dummy/%2e%2e/%2e%2e/%2e%2e/%2e%2e/index.html"); - notFoundUris.add("/_plugin/dummy/%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2findex.html"); - notFoundUris.add("/_plugin/dummy/%2E%2E/%2E%2E/%2E%2E/%2E%2E/index.html"); - notFoundUris.add("/_plugin/dummy/..%5C..%5C..%5C..%5C..%5Clog4j.properties"); - - for (String uri : notFoundUris) { - HttpResponse response = httpClient().path(uri).execute(); - String message = String.format(Locale.ROOT, "URI [%s] expected to be not found", uri); - assertThat(message, response, hasStatus(NOT_FOUND)); - } - - // using relative path inside of the plugin should work - 
HttpResponse response = httpClient().path("/_plugin/dummy/dir1/../dir1/../index.html").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin")); - } - - /** - * Test case for #4845: https://github.com/elasticsearch/elasticsearch/issues/4845 - * Serving _site plugins do not pick up on index.html for sub directories - */ - public void testWelcomePageInSubDirs() throws Exception { - HttpResponse response = httpClient().path("/_plugin/subdir/dir/").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin (subdir)")); - - response = httpClient().path("/_plugin/subdir/dir_without_index/").execute(); - assertThat(response, hasStatus(FORBIDDEN)); - - response = httpClient().path("/_plugin/subdir/dir_without_index/page.html").execute(); - assertThat(response, hasStatus(OK)); - assertThat(response.getBody(), containsString("Dummy Site Plugin (page)")); - } -} diff --git a/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java b/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java deleted file mode 100644 index 1cde90d6984..00000000000 --- a/core/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigIT.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.plugins; - -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; -import org.elasticsearch.test.rest.client.http.HttpResponse; - -import java.nio.file.Path; - -import static org.apache.lucene.util.Constants.WINDOWS; -import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.elasticsearch.rest.RestStatus.OK; -import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; - -@ClusterScope(scope = SUITE, numDataNodes = 1) -public class SitePluginRelativePathConfigIT extends ESIntegTestCase { - private final Path root = PathUtils.get(".").toAbsolutePath().getRoot(); - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - String cwdToRoot = getRelativePath(PathUtils.get(".").toAbsolutePath()); - Path pluginDir = PathUtils.get(cwdToRoot, relativizeToRootIfNecessary(getDataPath("/org/elasticsearch/test_plugins")).toString()); - - Path tempDir = createTempDir(); - boolean useRelativeInMiddleOfPath = randomBoolean(); - if (useRelativeInMiddleOfPath) { - pluginDir = PathUtils.get(tempDir.toString(), 
getRelativePath(tempDir), pluginDir.toString()); - } - - return settingsBuilder() - .put(super.nodeSettings(nodeOrdinal)) - .put("path.plugins", pluginDir) - .put("force.http.enabled", true) - .build(); - } - - public void testThatRelativePathsDontAffectPlugins() throws Exception { - HttpResponse response = httpClient().method("GET").path("/_plugin/dummy/").execute(); - assertThat(response, hasStatus(OK)); - } - - private Path relativizeToRootIfNecessary(Path path) { - if (WINDOWS) { - return root.relativize(path); - } - return path; - } - - private String getRelativePath(Path path) { - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < path.getNameCount(); i++) { - sb.append(".."); - sb.append(path.getFileSystem().getSeparator()); - } - - return sb.toString(); - } - - @Override - public HttpRequestBuilder httpClient() { - CloseableHttpClient httpClient = HttpClients.createDefault(); - return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)); - } -} diff --git a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 8d33758ef26..663d951a087 100644 --- a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -57,7 +57,7 @@ public class FullRollingRestartIT extends ESIntegTestCase { } public void testFullRollingRestart() throws Exception { - Settings settings = Settings.builder().put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "30s").build(); + Settings settings = Settings.builder().put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "30s").build(); internalCluster().startNode(settings); createIndex("test"); @@ -141,7 +141,7 @@ public class FullRollingRestartIT extends ESIntegTestCase { * to relocating to the restarting node since all had 2 shards and now one node has nothing allocated. 
* We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen. */ - prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMinutes(1))).get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get(); for (int i = 0; i < 100; i++) { client().prepareIndex("test", "type1", Long.toString(i)) diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 438445d538a..e9349a97d7d 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -55,7 +55,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -108,7 +108,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void 
testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -159,7 +159,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 2, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -230,7 +230,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY, Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 3, settingsBuilder().put(SETTING_NUMBER_OF_SHARDS, 
numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); diff --git a/core/src/test/java/org/elasticsearch/recovery/SmallFileChunkSizeRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/SmallFileChunkSizeRecoveryIT.java deleted file mode 100644 index aba0f38af85..00000000000 --- a/core/src/test/java/org/elasticsearch/recovery/SmallFileChunkSizeRecoveryIT.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.recovery; - -import org.elasticsearch.common.settings.Settings; - -/** - * - */ -public class SmallFileChunkSizeRecoveryIT extends SimpleRecoveryIT { - - @Override - protected Settings recoverySettings() { - return Settings.settingsBuilder().put("index.shard.recovery.file_chunk_size", "3b").build(); - } -} diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java b/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java index 2b7533cae19..f2ce16ac857 100644 --- a/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java +++ b/core/src/test/java/org/elasticsearch/rest/CorsRegexDefaultIT.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.rest; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; @@ -35,7 +36,7 @@ public class CorsRegexDefaultIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() - .put(Node.HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .put(super.nodeSettings(nodeOrdinal)).build(); } diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java b/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java index 3828ae0ad74..1c624f98e2f 100644 --- a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java +++ b/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.rest; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; @@ -47,9 +48,9 @@ public class CorsRegexIT extends ESIntegTestCase { return Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) 
.put(SETTING_CORS_ALLOW_ORIGIN, "/https?:\\/\\/localhost(:[0-9]+)?/") - .put(SETTING_CORS_ALLOW_CREDENTIALS, true) - .put(SETTING_CORS_ENABLED, true) - .put(Node.HTTP_ENABLED, true) + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } diff --git a/core/src/test/java/org/elasticsearch/rest/NoOpClient.java b/core/src/test/java/org/elasticsearch/rest/NoOpClient.java index 245bdb96a33..492c2cd43ed 100644 --- a/core/src/test/java/org/elasticsearch/rest/NoOpClient.java +++ b/core/src/test/java/org/elasticsearch/rest/NoOpClient.java @@ -39,7 +39,7 @@ public class NoOpClient extends AbstractClient { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute(Action action, Request request, ActionListener listener) { listener.onResponse(null); } diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 987aef90bc3..8ef7bcc41f5 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -39,7 +39,7 @@ public class FileScriptTests extends ESTestCase { Path mockscript = scriptsDir.resolve("script1.mockscript"); Files.write(mockscript, "1".getBytes("UTF-8")); settings = Settings.builder() - .put("path.home", homeDir) + .put(Environment.PATH_HOME_SETTING.getKey(), homeDir) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .put(settings) diff --git a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java index 47adeabe02f..0e8d477d0e3 100644 --- 
a/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/NativeScriptTests.java @@ -50,7 +50,7 @@ public class NativeScriptTests extends ESTestCase { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings settings = Settings.settingsBuilder() .put("name", "testNativeScript") - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); ScriptModule scriptModule = new ScriptModule(settings); scriptModule.registerScript("my", MyNativeScriptFactory.class); @@ -78,7 +78,7 @@ public class NativeScriptTests extends ESTestCase { String scriptContext = randomFrom(ScriptContext.Standard.values()).getKey(); builder.put(ScriptModes.SCRIPT_SETTINGS_PREFIX + scriptContext, randomFrom(ScriptMode.values())); } - Settings settings = builder.put("path.home", createTempDir()).build(); + Settings settings = builder.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); Environment environment = new Environment(settings); ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, null); Map nativeScriptFactoryMap = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 019eb7c74a0..36865f254bd 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -38,7 +38,7 @@ public class ScriptContextTests extends ESTestCase { ScriptService makeScriptService() throws Exception { Settings settings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // no file watching, so we don't need a ResourceWatcherService .put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING, false) .put("script." 
+ PLUGIN_NAME + "_custom_globally_disabled_op", false) diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 3c939e7e91a..2028605d844 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -67,8 +67,8 @@ public class ScriptServiceTests extends ESTestCase { public void setup() throws IOException { Path genericConfigFolder = createTempDir(); baseSettings = settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", genericConfigFolder) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) .build(); resourceWatcherService = new ResourceWatcherService(baseSettings, null); scriptEngineService = new TestEngineService(); @@ -378,7 +378,7 @@ public class ScriptServiceTests extends ESTestCase { public void testCompilationStatsOnCacheHit() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); + builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); buildScriptService(builder.build()); scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); @@ -403,7 +403,7 @@ public class ScriptServiceTests extends ESTestCase { public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException { ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder(); Settings.Builder builder = Settings.builder(); - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1); + 
builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), 1); buildScriptService(builder.build()); scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders, Collections.emptyMap()); diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 15313095650..b75d9634f69 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -18,19 +18,29 @@ */ package org.elasticsearch.search; +import java.io.IOException; + import org.elasticsearch.common.inject.ModuleTestCase; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; +import org.elasticsearch.index.query.TermQueryParser; import org.elasticsearch.search.highlight.CustomHighlighter; import org.elasticsearch.search.highlight.Highlighter; import org.elasticsearch.search.highlight.PlainHighlighter; import org.elasticsearch.search.suggest.CustomSuggester; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester; + +import static org.hamcrest.Matchers.containsString; /** */ public class SearchModuleTests extends ModuleTestCase { public void testDoubleRegister() { - SearchModule module = new SearchModule(); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); try { module.registerHighlighter("fvh", PlainHighlighter.class); } catch (IllegalArgumentException e) { @@ -45,7 +55,7 @@ public class SearchModuleTests 
extends ModuleTestCase { } public void testRegisterSuggester() { - SearchModule module = new SearchModule(); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); module.registerSuggester("custom", CustomSuggester.class); try { module.registerSuggester("custom", CustomSuggester.class); @@ -56,7 +66,7 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisterHighlighter() { - SearchModule module = new SearchModule(); + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); module.registerHighlighter("custom", CustomHighlighter.class); try { module.registerHighlighter("custom", CustomHighlighter.class); @@ -65,4 +75,32 @@ public class SearchModuleTests extends ModuleTestCase { } assertMapMultiBinding(module, Highlighter.class, CustomHighlighter.class); } + + public void testRegisterQueryParserDuplicate() { + SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + module.registerQueryParser(TermQueryParser::new); + try { + module.buildQueryParserRegistry(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("already registered for name [term] while trying to register [org.elasticsearch.index.")); + } + } + + static class FakeQueryParser implements QueryParser { + @Override + public String[] names() { + return new String[] {"fake-query-parser"}; + } + + @Override + public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + return null; + } + + @Override + public QueryBuilder getBuilderPrototype() { + return null; + } + } + } diff --git a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java index addfe14c488..9ea5ec93f1f 100644 --- a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java @@ -40,7 +40,7 @@ public class StressSearchServiceReaperIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { // very frequent checks return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(1)).build(); + .put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(1)).build(); } // see issue #5165 - this test fails each time without the fix in pull #5170 diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 3400766ca01..278db793fc4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.aggregations; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.inject.AbstractModule; @@ -51,7 +49,6 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.TestSearchContext; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.After; @@ -99,10 +96,9 @@ public abstract class BaseAggregationTestCase exte Settings settings = Settings.settingsBuilder() .put("name", BaseAggregationTestCase.class.toString()) .put("path.home", createTempDir()) - .put(IndexMetaData.SETTING_VERSION_CREATED, 
VersionUtils.randomVersionBetween(random(), - Version.V_1_0_0, Version.CURRENT)) .build(); + namedWriteableRegistry = new NamedWriteableRegistry(); index = new Index("test"); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), @@ -113,15 +109,16 @@ public abstract class BaseAggregationTestCase exte @Override protected void configure() { - bindQueryParsersExtension(); + bindMapperExtension(); } - }, new SearchModule() { + }, new SearchModule(settings, namedWriteableRegistry) { @Override - protected void configure() { - configureAggs(); - configureHighlighters(); - configureFetchSubPhase(); - configureFunctionScore(); + protected void configureSearch() { + // Skip me + } + @Override + protected void configureSuggesters() { + // Skip me } }, new IndexSettingsModule(index, settings), @@ -131,7 +128,7 @@ public abstract class BaseAggregationTestCase exte protected void configure() { bind(ClusterService.class).toProvider(Providers.of((ClusterService) null)); bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); } } ).createInjector(); @@ -142,7 +139,6 @@ public abstract class BaseAggregationTestCase exte String type = randomAsciiOfLengthBetween(1, 10); currentTypes[i] = type; } - namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); queriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); parseFieldMatcher = ParseFieldMatcher.STRICT; } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index 58e780fb995..30b6584533d 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ 
b/core/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.aggregations; -import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.inject.AbstractModule; @@ -52,7 +50,6 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.TestSearchContext; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.After; @@ -100,10 +97,9 @@ public abstract class BasePipelineAggregationTestCase> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index 4fbbef8a58d..672447a6811 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -30,15 +30,18 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid.Bucket; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Random; @@ -53,6 +56,12 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ESIntegTestCase.SuiteScopeTestCase public class GeoHashGridIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT); static ObjectIntMap expectedDocCountsForGeoHash = null; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java index 44bd22af1c9..bff5e7d0fc5 100644 --- 
a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ParentIdAggIT.java @@ -47,11 +47,11 @@ public class ParentIdAggIT extends ESIntegTestCase { refresh(); ensureGreen("testidx"); - SearchResponse searchResponse = client().prepareSearch("testidx").setTypes("childtype").setQuery(matchAllQuery()).addAggregation(AggregationBuilders.terms("children").field("_parent")).get(); + SearchResponse searchResponse = client().prepareSearch("testidx").setTypes("childtype").setQuery(matchAllQuery()).addAggregation(AggregationBuilders.terms("children").field("_parent#parenttype")).get(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); assertSearchResponse(searchResponse); assertThat(searchResponse.getAggregations().getAsMap().get("children"), instanceOf(Terms.class)); Terms terms = (Terms) searchResponse.getAggregations().getAsMap().get("children"); assertThat(terms.getBuckets().iterator().next().getDocCount(), equalTo(2l)); } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 173f9ddfef2..eb4143017bf 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -177,7 +177,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } public void onModule(SearchModule significanceModule) { - significanceModule.registerHeuristicParser(SimpleHeuristic.SimpleHeuristicParser.class); + significanceModule.registerHeuristicParser(new SimpleHeuristic.SimpleHeuristicParser()); } public void onModule(ScriptModule module) { 
module.registerScript(NativeSignificanceScoreScriptNoParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_NO_PARAMS, NativeSignificanceScoreScriptNoParams.Factory.class); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 420fe12ad20..3a90c8e00d3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.shard.ShardId; @@ -118,7 +119,7 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase { IndexSearcher searcher = new IndexSearcher(directoryReader); IndexService indexService = createIndex("test"); - indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true, false); + indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), MapperService.MergeReason.MAPPING_UPDATE, false); SearchContext searchContext = createSearchContext(indexService); AggregationContext context = new AggregationContext(searchContext); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 12658a86062..865a3513bd4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.InputStreamStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -31,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -195,7 +195,7 @@ public class SignificanceHeuristicTests extends ESTestCase { public void testBuilderAndParser() throws Exception { Set parsers = new HashSet<>(); - SignificanceHeuristicParserMapper heuristicParserMapper = new SignificanceHeuristicParserMapper(parsers, null); + SignificanceHeuristicParserMapper heuristicParserMapper = new SignificanceHeuristicParserMapper(parsers); SearchContext searchContext = new SignificantTermsTestSearchContext(); // test jlh with string @@ -235,7 +235,7 @@ public class SignificanceHeuristicTests extends ESTestCase { protected void checkParseException(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext 
searchContext, String faultyHeuristicDefinition, String expectedError) throws IOException { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, new HashSet<>(), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, new HashMap>()); try { XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); QueryParseContext parseContext = new QueryParseContext(registry); @@ -260,7 +260,7 @@ public class SignificanceHeuristicTests extends ESTestCase { private SignificanceHeuristic parseSignificanceHeuristic(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, XContentParser stParser) throws IOException { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, new HashSet<>(), new NamedWriteableRegistry()); + IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, new HashMap>()); QueryParseContext parseContext = new QueryParseContext(registry); parseContext.reset(stParser); parseContext.parseFieldMatcher(ParseFieldMatcher.STRICT); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java index db02d6ccb03..cb819a7027b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java @@ -388,12 +388,6 @@ public class AvgIT extends AbstractNumericTestCase { final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); return new LeafSearchScript() { - - @Override - public Object unwrap(Object value) { - return null; - } - @Override public void setNextVar(String name, Object value) { } @@ -526,12 +520,6 @@ public class AvgIT extends AbstractNumericTestCase { final LeafSearchLookup leafLookup = 
lookup.getLeafSearchLookup(context); return new LeafSearchScript() { - - @Override - public Object unwrap(Object value) { - throw new UnsupportedOperationException(); - } - @Override public void setNextVar(String name, Object value) { vars.put(name, value); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 2c27bde57dc..39c865eae35 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -384,12 +384,6 @@ public class SumIT extends AbstractNumericTestCase { final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); return new LeafSearchScript() { - - @Override - public Object unwrap(Object value) { - return null; - } - @Override public void setNextVar(String name, Object value) { } diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 1f2ef1a28d1..c45b04f7825 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -28,8 +28,10 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MockEngineFactoryPlugin; @@ 
-52,7 +54,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class); + return pluginList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class, MockEngineFactoryPlugin.class); } public void testRandomExceptions() throws IOException, InterruptedException, ExecutionException { @@ -151,10 +153,16 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { public static class RandomExceptionDirectoryReaderWrapper extends MockEngineSupport.DirectoryReaderWrapper { public static class TestPlugin extends Plugin { + public static final Setting EXCEPTION_TOP_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting EXCEPTION_LOW_LEVEL_RATIO_SETTING = Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, false, Setting.Scope.INDEX); @Override public String name() { return "random-exception-reader-wrapper"; } + public void onModule(SettingsModule module) { + module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING); + module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING); + } @Override public String description() { return "a mock reader wrapper that throws random exceptions for testing"; @@ -172,7 +180,7 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { private final double lowLevelRatio; ThrowingSubReaderWrapper(Settings settings) { - final long seed = settings.getAsLong(SETTING_INDEX_SEED, 0l); + final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); this.topLevelRatio = settings.getAsDouble(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d); this.lowLevelRatio = settings.getAsDouble(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d); this.random = new Random(seed); diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java 
b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 1708d10f96f..a6b76359cff 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -102,15 +102,15 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase { client().admin().indices().prepareFlush("test").setWaitIfOngoing(true).execute().get(); client().admin().indices().prepareClose("test").execute().get(); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder() - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE, exceptionRate) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, exceptionOnOpenRate)); + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), exceptionRate) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate)); client().admin().indices().prepareOpen("test").execute().get(); } else { Settings.Builder settings = settingsBuilder() .put("index.number_of_replicas", randomIntBetween(0, 1)) - .put(MockFSIndexStore.CHECK_INDEX_ON_CLOSE, false) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE, exceptionRate) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, exceptionOnOpenRate); // we cannot expect that the index will be valid + .put(MockFSIndexStore.INDEX_CHECK_INDEX_ON_CLOSE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), exceptionRate) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate); // we cannot expect that the index will be valid logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap()); client().admin().indices().prepareCreate("test") .setSettings(settings) @@ -184,8 +184,8 @@ public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase { // 
check the index still contains the records that we indexed without errors client().admin().indices().prepareClose("test").execute().get(); client().admin().indices().prepareUpdateSettings("test").setSettings(settingsBuilder() - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE, 0) - .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0)); + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING.getKey(), 0) + .put(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), 0)); client().admin().indices().prepareOpen("test").execute().get(); ensureGreen(); SearchResponse searchResponse = client().prepareSearch().setTypes("type").setQuery(QueryBuilders.matchQuery("test", "init")).get(); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index cbc7f93ff5a..3d3388b87b4 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -54,7 +54,7 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); - assertAcked(prepareCreate("test", 1, settingsBuilder().put("routing.hash.type", "simple"))); + assertAcked(prepareCreate("test", 1)); ensureYellow(); NumShards test = getNumShards("test"); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 6d239a8cdb0..eedc4d9e79f 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -82,8 +82,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { Set 
fullExpectedIds = new TreeSet<>(); Settings.Builder settingsBuilder = settingsBuilder() - .put(indexSettings()) - .put("routing.hash.type", "simple"); + .put(indexSettings()); if (numShards > 0) { settingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numShards); @@ -122,8 +121,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { public void testDfsQueryThenFetch() throws Exception { Settings.Builder settingsBuilder = settingsBuilder() - .put(indexSettings()) - .put("routing.hash.type", "simple"); + .put(indexSettings()); client().admin().indices().create(createIndexRequest("test") .settings(settingsBuilder)) .actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 92bc6af6979..62f3e22de3d 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -19,6 +19,11 @@ package org.elasticsearch.search.builder; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -39,14 +44,15 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.AbstractQueryTestCase; import org.elasticsearch.index.query.EmptyQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import 
org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.Script; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder.InnerHit; @@ -63,11 +69,6 @@ import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.AfterClass; import org.junit.BeforeClass; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.TimeUnit; - import static org.hamcrest.Matchers.equalTo; public class SearchSourceBuilderTests extends ESTestCase { @@ -81,28 +82,35 @@ public class SearchSourceBuilderTests extends ESTestCase { public static void init() throws IOException { Settings settings = Settings.settingsBuilder() .put("name", SearchSourceBuilderTests.class.toString()) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); + namedWriteableRegistry = new NamedWriteableRegistry(); injector = new ModulesBuilder().add( new SettingsModule(settings, new SettingsFilter(settings)), new ThreadPoolModule(new ThreadPool(settings)), - new IndicesModule() { + new SearchModule(settings, namedWriteableRegistry) { @Override - public void configure() { - // skip services - bindQueryParsersExtension(); + protected void configureSearch() { + // skip me so we don't need transport + } + @Override + protected void configureAggs(IndicesQueriesRegistry indicesQueriesRegistry) { + // skip me so we don't need scripting + } + @Override + protected void configureSuggesters() { + // skip me so we don't need IndicesService } }, new AbstractModule() { @Override protected void configure() { Multibinder.newSetBinder(binder(), ScoreFunctionParser.class); - bind(NamedWriteableRegistry.class).asEagerSingleton(); + 
bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); } } ).createInjector(); indicesQueriesRegistry = injector.getInstance(IndicesQueriesRegistry.class); - namedWriteableRegistry = injector.getInstance(NamedWriteableRegistry.class); } @AfterClass diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index 45be05c10d8..bd396cf6374 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -67,6 +67,7 @@ import static org.elasticsearch.index.query.QueryBuilders.idsQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.parentId; import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -96,8 +97,8 @@ public class ChildQuerySearchIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) // aggressive filter caching so that we can assert on the filter cache size - .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE) - .put(IndexModule.QUERY_CACHE_EVERYTHING, true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) .build(); } @@ -208,7 +209,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1")); // TEST matching on parent - 
searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent", "p1")).fields("_parent").get(); + searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).fields("_parent").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -216,7 +217,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1")); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent:p1")).fields("_parent").get(); + searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).fields("_parent").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -953,70 +954,63 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).score(), equalTo(3.0f)); } - public void testParentFieldFilter() throws Exception { + public void testParentFieldQuery() throws Exception { assertAcked(prepareCreate("test") .setSettings(settingsBuilder().put(indexSettings()) .put("index.refresh_interval", -1)) .addMapping("parent") - .addMapping("child", "_parent", "type=parent") - .addMapping("child2", "_parent", "type=parent")); + .addMapping("child", "_parent", "type=parent")); ensureGreen(); - // test term filter - SearchResponse response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))) + SearchResponse response = client().prepareSearch("test").setQuery(termQuery("_parent", "p1")) .get(); - assertHitCount(response, 0l); + 
assertHitCount(response, 0L); - client().prepareIndex("test", "some_type", "1").setSource("field", "value").get(); - client().prepareIndex("test", "parent", "p1").setSource("p_field", "value").get(); - client().prepareIndex("test", "child", "c1").setSource("c_field", "value").setParent("p1").get(); - - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute() - .actionGet(); - assertHitCount(response, 0l); + client().prepareIndex("test", "child", "c1").setSource("{}").setParent("p1").get(); refresh(); - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute() - .actionGet(); - assertHitCount(response, 1l); + response = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).get(); + assertHitCount(response, 1L); - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "parent#p1"))).execute() - .actionGet(); - assertHitCount(response, 1l); - - client().prepareIndex("test", "parent2", "p1").setSource("p_field", "value").setRefresh(true).get(); - - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute() - .actionGet(); - assertHitCount(response, 1l); - - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "parent#p1"))).execute() - .actionGet(); - assertHitCount(response, 1l); - - // test terms filter - client().prepareIndex("test", "child2", "c1").setSource("c_field", "value").setParent("p1").get(); - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1"))).execute() - .actionGet(); - assertHitCount(response, 1l); - - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", 
"parent#p1"))).execute() - .actionGet(); - assertHitCount(response, 1l); + response = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).get(); + assertHitCount(response, 1L); + client().prepareIndex("test", "child", "c2").setSource("{}").setParent("p2").get(); refresh(); - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1"))).execute() - .actionGet(); - assertHitCount(response, 2l); - - refresh(); - response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1", "p1"))).execute() - .actionGet(); - assertHitCount(response, 2l); + response = client().prepareSearch("test").setQuery(termsQuery("_parent#parent", "p1", "p2")).get(); + assertHitCount(response, 2L); response = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "parent#p1", "parent2#p1"))).get(); - assertHitCount(response, 2l); + .setQuery(boolQuery() + .should(termQuery("_parent#parent", "p1")) + .should(termQuery("_parent#parent", "p2")) + ).get(); + assertHitCount(response, 2L); + } + + public void testParentIdQuery() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(settingsBuilder().put(indexSettings()) + .put("index.refresh_interval", -1)) + .addMapping("parent") + .addMapping("child", "_parent", "type=parent")); + ensureGreen(); + + client().prepareIndex("test", "child", "c1").setSource("{}").setParent("p1").get(); + refresh(); + + SearchResponse response = client().prepareSearch("test").setQuery(parentId("child", "p1")).get(); + assertHitCount(response, 1L); + + client().prepareIndex("test", "child", "c2").setSource("{}").setParent("p2").get(); + refresh(); + + response = client().prepareSearch("test") + .setQuery(boolQuery() + .should(parentId("child", "p1")) + .should(parentId("child", "p2")) + ).get(); + assertHitCount(response, 2L); } public void 
testHasChildNotBeingCached() throws IOException { @@ -1459,7 +1453,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { refresh(); SearchResponse response = client().prepareSearch("test") - .setQuery(multiMatchQuery("1", "_parent")) + .setQuery(multiMatchQuery("1", "_parent#type1")) .get(); assertThat(response.getHits().totalHits(), equalTo(1l)); diff --git a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java index b5f20e9357c..1a4493a9832 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingIT.java @@ -33,9 +33,12 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.util.Collection; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -44,10 +47,15 @@ import static org.hamcrest.Matchers.greaterThan; public class ParentFieldLoadingIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.merge.enabled + } + private final Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_REFRESH_INTERVAL, -1) + .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1) // We never want merges in this test to ensure we have two segments for the last validation .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) 
.build(); @@ -149,7 +157,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase { MapperService mapperService = indexService.mapperService(); DocumentMapper documentMapper = mapperService.documentMapper("child"); if (documentMapper != null) { - verified = documentMapper.parentFieldMapper().getChildJoinFieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS; + verified = documentMapper.parentFieldMapper().fieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS; } } assertTrue(verified); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index e4d44ec73af..695575d9d08 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -33,14 +33,17 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Locale; @@ -66,6 +69,11 @@ import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThan; public class DecayFunctionScoreIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + public void 
testDistanceScoreGeoLinGaussExp() throws Exception { assertAcked(prepareCreate("test").addMapping( "type1", diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java index b428d911dd5..d1d8278b79b 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java @@ -19,6 +19,8 @@ package org.elasticsearch.search.functionscore; +import java.util.Collection; + import org.apache.lucene.search.Explanation; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchResponse; @@ -36,8 +38,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import java.util.Collection; - import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -103,7 +103,7 @@ public class FunctionScorePluginIT extends ESIntegTestCase { } public void onModule(SearchModule scoreModule) { - scoreModule.registerFunctionScoreParser(FunctionScorePluginIT.CustomDistanceScoreParser.class); + scoreModule.registerFunctionScoreParser(new FunctionScorePluginIT.CustomDistanceScoreParser()); } } diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index b354edfc60e..14d620b9b99 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -26,10 +26,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.GeoValidationMethod; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import java.util.Collection; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; @@ -42,6 +46,11 @@ import static org.hamcrest.Matchers.equalTo; * */ public class GeoBoundingBoxIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + public void testSimpleBoundingBoxTest() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index e96c53fe943..ac8e9e029b5 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -52,8 +52,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.GeohashCellQuery; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.junit.BeforeClass; @@ -91,6 +93,11 @@ import static 
org.hamcrest.Matchers.lessThanOrEqualTo; */ public class GeoFilterIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + private static boolean intersectSupport; private static boolean disjointSupport; private static boolean withinSupport; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java index 69b93018d8d..4d097ec87ab 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -26,11 +26,14 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -44,6 +47,11 @@ import static org.hamcrest.Matchers.equalTo; @ESIntegTestCase.SuiteScopeTestCase public class GeoPolygonIT extends ESIntegTestCase { + @Override + protected Collection> nodePlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + @Override protected void setupSuiteScopeCluster() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 2ac5895c9eb..383bde00b01 100644 --- 
a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -19,6 +19,13 @@ package org.elasticsearch.search.highlight; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; @@ -42,16 +49,13 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.IdsQueryParser; import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.TermQueryParser; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.highlight.HighlightBuilder.Field; import org.elasticsearch.search.highlight.HighlightBuilder.Order; import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; @@ -60,15 +64,6 @@ import org.elasticsearch.test.IndexSettingsModule; import org.junit.AfterClass; import org.junit.BeforeClass; -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; - import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.not; @@ -84,12 +79,7 @@ public class HighlightBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - @SuppressWarnings("rawtypes") - Set injectedQueryParsers = new HashSet<>(); - injectedQueryParsers.add(new MatchAllQueryParser()); - injectedQueryParsers.add(new IdsQueryParser()); - injectedQueryParsers.add(new TermQueryParser()); - indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(), injectedQueryParsers, namedWriteableRegistry); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); } @AfterClass diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index daa996be702..1e71b868236 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -40,6 +40,7 @@ import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.util.ArrayList; import java.util.Collection; @@ -72,7 +73,7 @@ import static org.hamcrest.Matchers.nullValue; public class InnerHitsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(MockScriptEngine.TestPlugin.class); + return pluginList(MockScriptEngine.TestPlugin.class, InternalSettingsPlugin.class); } public void testSimpleNested() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java b/core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java new file mode 100644 index 00000000000..d8fe2308bc7 --- /dev/null 
+++ b/core/src/test/java/org/elasticsearch/search/internal/DefaultSearchContextTests.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.internal; + +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.index.mapper.internal.TypeFieldMapper; +import org.elasticsearch.test.ESTestCase; + +import static org.apache.lucene.search.BooleanClause.Occur.FILTER; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class DefaultSearchContextTests extends ESTestCase { + + public void testCreateSearchFilter() { + Query searchFilter = DefaultSearchContext.createSearchFilter(new String[]{"type1", "type2"}, null, randomBoolean()); + Query expectedQuery = new BooleanQuery.Builder() + .add(new TermsQuery(TypeFieldMapper.NAME, new BytesRef("type1"), new BytesRef("type2")), FILTER) + .build(); + assertThat(searchFilter, equalTo(expectedQuery)); + + searchFilter = 
DefaultSearchContext.createSearchFilter(new String[]{"type1", "type2"}, new MatchAllDocsQuery(), randomBoolean()); + expectedQuery = new BooleanQuery.Builder() + .add(new TermsQuery(TypeFieldMapper.NAME, new BytesRef("type1"), new BytesRef("type2")), FILTER) + .add(new MatchAllDocsQuery(), FILTER) + .build(); + assertThat(searchFilter, equalTo(expectedQuery)); + + searchFilter = DefaultSearchContext.createSearchFilter(null, null, false); + assertThat(searchFilter, nullValue()); + + searchFilter = DefaultSearchContext.createSearchFilter(null, null, true); + expectedQuery = new BooleanQuery.Builder().add(Queries.newNonNestedFilter(), FILTER).build(); + assertThat(searchFilter, equalTo(expectedQuery)); + + searchFilter = DefaultSearchContext.createSearchFilter(null, new MatchAllDocsQuery(), true); + expectedQuery = new BooleanQuery.Builder() + .add(new MatchAllDocsQuery(), FILTER) + .add(Queries.newNonNestedFilter(), FILTER) + .build(); + assertThat(searchFilter, equalTo(expectedQuery)); + + searchFilter = DefaultSearchContext.createSearchFilter(null, new MatchAllDocsQuery(), false); + expectedQuery = new BooleanQuery.Builder() + .add(new MatchAllDocsQuery(), FILTER) + .build(); + assertThat(searchFilter, equalTo(expectedQuery)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 61890092831..fd9ee9a3f10 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -235,7 +235,7 @@ public class SimpleNestedIT extends ESIntegTestCase { // This IncludeNestedDocsQuery also needs to be aware of the filter from alias public void testDeleteNestedDocsWithAlias() throws Exception { assertAcked(prepareCreate("test") - .setSettings(settingsBuilder().put(indexSettings()).put("index.referesh_interval", -1).build()) + 
.setSettings(settingsBuilder().put(indexSettings()).put("index.refresh_interval", -1).build()) .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("field1") .field("type", "string") @@ -412,7 +412,7 @@ public class SimpleNestedIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .setSettings(settingsBuilder() .put(indexSettings()) - .put("index.referesh_interval", -1)) + .put("index.refresh_interval", -1)) .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("nested1") .field("type", "nested") diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index a789497235e..1cbdf60a4a4 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -230,6 +231,12 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .setQuery(randomizeType(multiMatchQuery("15", "skill"))).get(); assertNoFailures(searchResponse); assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("15", "skill", "int-field")).analyzer("category")).get(); + assertNoFailures(searchResponse); + assertFirstHit(searchResponse, hasId("theone")); + String[] fields = {"full_name", "first_name", "last_name", "last_name_phrase", "first_name_phrase", "category_phrase", "category"}; String[] query = {"marvel","hero", "captain", "america", "15", "17", "1", "5", "ultimate", "Man", @@ 
-459,18 +466,65 @@ public class MultiMatchQueryIT extends ESIntegTestCase { assertHitCount(searchResponse, 1l); assertFirstHit(searchResponse, hasId("theone")); + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill", "int-field") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category") + .operator(Operator.AND))).get(); + assertHitCount(searchResponse, 1l); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("captain america 15", "skill", "full_name", "first_name", "last_name", "category", "int-field") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category") + .operator(Operator.AND))).get(); + assertHitCount(searchResponse, 1l); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america 15", "first_name", "last_name", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category"))).get(); assertFirstHit(searchResponse, hasId("theone")); + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("15", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category"))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("25 15", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category"))).get(); + assertFirstHit(searchResponse, hasId("theone")); + searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("25 15", "int-field", "skill") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) .analyzer("category"))).get(); assertFirstHit(searchResponse, hasId("theone")); + searchResponse = client().prepareSearch("test") 
+ .setQuery(randomizeType(multiMatchQuery("25 15", "first_name", "int-field", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category"))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("25 15", "int-field", "skill", "first_name") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category"))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("25 15", "int-field", "first_name", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category"))).get(); + assertFirstHit(searchResponse, hasId("theone")); + searchResponse = client().prepareSearch("test") .setQuery(randomizeType(multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category") .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) @@ -529,6 +583,46 @@ public class MultiMatchQueryIT extends ESIntegTestCase { assertFirstHit(searchResponse, hasId("ultimate2")); assertSecondHit(searchResponse, hasId("ultimate1")); assertThat(searchResponse.getHits().hits()[0].getScore(), greaterThan(searchResponse.getHits().hits()[1].getScore())); + + // Test group based on numeric fields + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("15", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("15", "skill", "first_name") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + // Two numeric fields together caused trouble at one point! 
+ searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("15", "int-field", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))).get(); + assertFirstHit(searchResponse, hasId("theone")); + + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("alpha 15", "first_name", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .lenient(true))).get(); + assertFirstHit(searchResponse, hasId("ultimate1")); + /* + * Doesn't find theone because "alpha 15" isn't a number and we don't + * break on spaces. + */ + assertHitCount(searchResponse, 1); + + // Lenient wasn't always properly lenient with two numeric fields + searchResponse = client().prepareSearch("test") + .setQuery(randomizeType(multiMatchQuery("alpha 15", "int-field", "first_name", "skill") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .lenient(true))).get(); + assertFirstHit(searchResponse, hasId("ultimate1")); } private static final void assertEquivalent(String query, SearchResponse left, SearchResponse right) { diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java index 96f2e23bba1..20cf8596c4a 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -57,7 +57,7 @@ public class SearchScrollWithFailingNodesIT extends ESIntegTestCase { assertAcked( prepareCreate("test") // Enforces that only one shard can only be allocated to a single node - 
.setSettings(Settings.builder().put(indexSettings()).put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE, 1)) + .setSettings(Settings.builder().put(indexSettings()).put(ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), 1)) ); List writes = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index 27cc3d3cfb8..d14ea50838f 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.internal.DefaultSearchContext; @@ -40,6 +41,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; @@ -124,6 +126,16 @@ public class SimpleSearchIT extends ESIntegTestCase { refresh(); SearchResponse search = client().prepareSearch() + .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1"))) + .execute().actionGet(); + 
assertHitCount(search, 1L); + + search = client().prepareSearch() + .setQuery(queryStringQuery("ip: 192.168.0.1")) + .execute().actionGet(); + assertHitCount(search, 1L); + + search = client().prepareSearch() .setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32"))) .execute().actionGet(); assertHitCount(search, 1l); @@ -282,54 +294,54 @@ public class SimpleSearchIT extends ESIntegTestCase { createIndex("idx"); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertWindowFails(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); - assertWindowFails(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1)); - assertWindowFails(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + assertWindowFails(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); + assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1)); + assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); } public void testLargeFromAndSizeSucceeds() throws Exception { createIndex("idx"); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW - 10).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW / 2) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW / 2 - 1).get(), 1); + 
assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) - 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2 - 1).get(), 1); } public void testTooLargeFromAndSizeOkBySetting() throws Exception { - prepareCreate("idx").setSettings(DefaultSearchContext.MAX_RESULT_WINDOW, DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 2).get(); + prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2).get(); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); } public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { createIndex("idx"); assertAcked(client().admin().indices().prepareUpdateSettings("idx") .setSettings( - 
Settings.builder().put(DefaultSearchContext.MAX_RESULT_WINDOW, DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 2)) + Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 2)) .get()); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW + 1).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)).get(), 1); } public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws Exception { - prepareCreate("idx").setSettings(DefaultSearchContext.MAX_RESULT_WINDOW, Integer.MAX_VALUE).get(); + prepareCreate("idx").setSettings(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE).get(); indexRandom(true, client().prepareIndex("idx", "type").setSource("{}")); - assertHitCount(client().prepareSearch("idx").setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1); - assertHitCount(client().prepareSearch("idx").setSize(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10) - .setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 
10).get(), 1); + assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); + assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10) + .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10).get(), 1); } public void testQueryNumericFieldWithRegex() throws Exception { @@ -350,7 +362,7 @@ public class SimpleSearchIT extends ESIntegTestCase { fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("Result window is too large, from + size must be less than or equal to: [" - + DefaultSearchContext.Defaults.MAX_RESULT_WINDOW)); + + IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); assertThat(e.toString(), containsString("See the scroll api for a more efficient way to request large data sets")); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java new file mode 100644 index 00000000000..e505ec68e6a --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/sort/SortOrderTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.sort; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class SortOrderTests extends ESTestCase { + + /** Check that ordinals remain stable as we rely on them for serialisation. */ + public void testOrdinalOrder() { + assertEquals(0, SortOrder.ASC.ordinal()); + assertEquals(1, SortOrder.DESC.ordinal()); + } + + public void testReadWrite() throws Exception { + for (SortOrder unit : SortOrder.values()) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + unit.writeTo(out); + try (StreamInput in = StreamInput.wrap(out.bytes())) { + assertThat("Roundtrip serialisation failed.", SortOrder.readOrderFrom(in), equalTo(unit)); + } + } + } + } + + public void testFromString() { + for (SortOrder unit : SortOrder.values()) { + assertThat("Roundtrip string parsing failed.", SortOrder.fromString(unit.toString()), equalTo(unit)); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java index 8912956489f..1df6960b6df 100644 --- a/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/core/src/test/java/org/elasticsearch/similarity/SimilarityIT.java @@ -55,7 +55,7 @@ public class SimilarityIT extends ESIntegTestCase { .put("index.number_of_replicas", 0) .put("similarity.custom.type", "BM25") .put("similarity.custom.k1", 
2.0f) - .put("similarity.custom.b", 1.5f) + .put("similarity.custom.b", 0.5f) ).execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource("field1", "the quick brown fox jumped over the lazy dog", diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index dcea25617b2..22bef614bf6 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -100,7 +100,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> start 2 nodes"); Settings nodeSettings = settingsBuilder() .put("discovery.type", "zen") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "200ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "200ms") .put("discovery.initial_state_timeout", "500ms") .build(); internalCluster().startNode(nodeSettings); @@ -766,7 +766,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> update index settings to back to normal"); assertAcked(client().admin().indices().prepareUpdateSettings("test-*").setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "node") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "node") )); // Make sure that snapshot finished - doesn't matter if it failed or succeeded @@ -888,12 +888,11 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest } assertAcked(client().admin().indices().prepareUpdateSettings(name).setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "all") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, between(100, 50000)) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "all") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), 
between(100, 50000)) )); } - static { MetaData.registerPrototype(SnapshottableMetadata.TYPE, SnapshottableMetadata.PROTO); MetaData.registerPrototype(NonSnapshottableMetadata.TYPE, NonSnapshottableMetadata.PROTO); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index bd94a974a0f..dac6ac0904d 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -80,8 +80,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.IndexSettings.INDEX_REFRESH_INTERVAL; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesExist; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAliasesMissing; @@ -748,7 +748,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("location", randomRepoPath()))); logger.info("--> creating index that cannot be allocated"); - prepareCreate("test-idx", 2, Settings.builder().put(FilterAllocationDecider.INDEX_ROUTING_INCLUDE_GROUP + ".tag", "nowhere").put("index.number_of_shards", 3)).get(); + prepareCreate("test-idx", 2, Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + ".tag", "nowhere").put("index.number_of_shards", 3)).get(); logger.info("--> snapshot"); 
CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get(); @@ -1524,8 +1524,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // Update settings to make sure that relocation is slow so we can start snapshot before relocation is finished assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "all") - .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC, 100, ByteSizeUnit.BYTES) + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "all") + .put(IndexStore.INDEX_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(), 100, ByteSizeUnit.BYTES) )); logger.info("--> start relocations"); @@ -1540,7 +1540,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas // Update settings to back to normal assertAcked(client.admin().indices().prepareUpdateSettings("test-idx").setSettings(Settings.builder() - .put(IndexStore.INDEX_STORE_THROTTLE_TYPE, "node") + .put(IndexStore.INDEX_STORE_THROTTLE_TYPE_SETTING.getKey(), "node") )); logger.info("--> wait for snapshot to complete"); @@ -1627,7 +1627,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Settings.Builder indexSettings = Settings.builder() .put(indexSettings()) .put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)) - .put(INDEX_REFRESH_INTERVAL, "10s") + .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s") .put("index.analysis.analyzer.my_analyzer.type", "custom") .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") .putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym") @@ -1695,7 +1695,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> assert that correct settings are restored"); GetSettingsResponse getSettingsResponse = 
client.admin().indices().prepareGetSettings("test-idx").execute().actionGet(); - assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL), equalTo("5s")); + assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL_SETTING.getKey()), equalTo("5s")); // Make sure that number of shards didn't change assertThat(getSettingsResponse.getSetting("test-idx", SETTING_NUMBER_OF_SHARDS), equalTo("" + numberOfShards)); assertThat(getSettingsResponse.getSetting("test-idx", "index.analysis.analyzer.my_analyzer.type"), equalTo("standard")); @@ -1717,7 +1717,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> assert that correct settings are restored and index is still functional"); getSettingsResponse = client.admin().indices().prepareGetSettings("test-idx").execute().actionGet(); - assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL), equalTo("5s")); + assertThat(getSettingsResponse.getSetting("test-idx", INDEX_REFRESH_INTERVAL_SETTING.getKey()), equalTo("5s")); // Make sure that number of shards didn't change assertThat(getSettingsResponse.getSetting("test-idx", SETTING_NUMBER_OF_SHARDS), equalTo("" + numberOfShards)); @@ -1739,7 +1739,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Settings.Builder indexSettings = Settings.builder() .put(indexSettings()) .put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)) - .put(INDEX_REFRESH_INTERVAL, "10s"); + .put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s"); logger.info("--> create index"); assertAcked(prepareCreate("test-idx", 2, indexSettings)); diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index 60f1bad6089..ea225d9680b 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ 
b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; @@ -186,7 +187,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { public void testThreadPoolLeakingThreadsWithTribeNode() { Settings settings = Settings.builder() .put("node.name", "thread_pool_leaking_threads_tribe_node") - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("tribe.t1.cluster.name", "non_existing_cluster") //trigger initialization failure of one of the tribes (doesn't require starting the node) .put("tribe.t1.plugin.mandatory", "non_existing").build(); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java index ee49012291d..f4dccc77161 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -73,7 +74,7 @@ public class 
NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { Settings settings = settingsBuilder() .put("cluster.name", internalCluster().getClusterName()) .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) { transportClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), randomPort)); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 1350dcbb8ed..260c6252efd 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.MasterNotDiscoveredException; @@ -51,6 +52,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -78,7 +80,7 @@ public class TribeIT extends ESIntegTestCase { NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(Node.HTTP_ENABLED, false).build(); + return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build(); } @Override @@ -93,7 +95,7 @@ 
public class TribeIT extends ESIntegTestCase { }; cluster2 = new InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), 2, 2, - Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList()); + Strings.randomBase64UUID(getRandom()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList(), Function.identity()); cluster2.beforeTest(getRandom(), 0.1); cluster2.ensureAtLeastNumDataNodes(2); @@ -134,8 +136,8 @@ public class TribeIT extends ESIntegTestCase { tribe2Defaults.put("tribe.t2." + entry.getKey(), entry.getValue()); } // give each tribe it's unicast hosts to connect to - tribe1Defaults.putArray("tribe.t1." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS, getUnicastHosts(internalCluster().client())); - tribe1Defaults.putArray("tribe.t2." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS, getUnicastHosts(cluster2.client())); + tribe1Defaults.putArray("tribe.t1." + UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(internalCluster().client())); + tribe1Defaults.putArray("tribe.t2." 
+ UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.getKey(), getUnicastHosts(cluster2.client())); Settings merged = Settings.builder() .put("tribe.t1.cluster.name", internalCluster().getClusterName()) diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 7cf79cacf56..68d3f7d828a 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -33,10 +33,10 @@ import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; @@ -47,6 +47,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.util.ArrayList; @@ -153,12 +154,6 @@ public class UpdateIT extends ESIntegTestCase { return ctx; } - - @Override - public Object unwrap(Object value) { - return value; - } - }; } @@ -244,12 +239,6 @@ public class UpdateIT extends ESIntegTestCase { source.put(field, currentValue.longValue() + inc.longValue()); return ctx; } - - @Override - public Object unwrap(Object value) { - return value; - } - }; } @@ -335,12 +324,6 @@ public class UpdateIT extends ESIntegTestCase { source.put("balance", oldBalance.intValue() - 
deduction); return ctx; } - - @Override - public Object unwrap(Object value) { - return value; - } - }; } @@ -427,12 +410,6 @@ public class UpdateIT extends ESIntegTestCase { return ctx; } - - @Override - public Object unwrap(Object value) { - return value; - } - }; } @@ -453,7 +430,9 @@ public class UpdateIT extends ESIntegTestCase { PutFieldValuesScriptPlugin.class, FieldIncrementScriptPlugin.class, ScriptedUpsertScriptPlugin.class, - ExtractContextInSourceScriptPlugin.class); + ExtractContextInSourceScriptPlugin.class, + InternalSettingsPlugin.class // uses index.merge.enabled + ); } private void createTestIndex() throws Exception { diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 2edc39c7029..00000000000 --- a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -69e187ef1d2d9c9570363eb4186821e0341df5b8 \ No newline at end of file diff --git a/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..bb0dbacbfbf --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +528a695bb8882dbc3d9866335ac1bb3905cba4e3 \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 0b6a49a68e3..00000000000 --- a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0fa00a45ff9bc6a4df44db81f2e4e44ea94bf88e \ No newline at end of file diff --git a/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1725675.jar.sha1 
b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..ec23e314ecb --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +3fb1bcc1001a10b74ae91848c8558572891c1409 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 3ff27aff723..00000000000 --- a/distribution/licenses/lucene-core-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6854c65c7f4c6d9de583f4daa4fd3ae8a3800f1 \ No newline at end of file diff --git a/distribution/licenses/lucene-core-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-core-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..fc810e4e240 --- /dev/null +++ b/distribution/licenses/lucene-core-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +9eff7f186877882f8b68f031f610bd7ab8c5c1fb \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 9ffcb6d07cf..00000000000 --- a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e996e6c723eb415ba2cfa7f5e98bbf194a4918dd \ No newline at end of file diff --git a/distribution/licenses/lucene-grouping-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..28851461aa1 --- /dev/null +++ b/distribution/licenses/lucene-grouping-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +6e6253936522f27b35ba4d8485806f517ef2df45 \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 
100644 index b126eebd88f..00000000000 --- a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3b7a5d97b10885f16eb53deb15d64c942b9f9fdb \ No newline at end of file diff --git a/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..9d434fde909 --- /dev/null +++ b/distribution/licenses/lucene-highlighter-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +8a313aa34b0070d3f7d48005e7677b680db1b09d \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 8313bac1acf..00000000000 --- a/distribution/licenses/lucene-join-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e4dda3eeb76e340aa4713a3b20d68c4a1504e505 \ No newline at end of file diff --git a/distribution/licenses/lucene-join-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-join-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..9a7a013720b --- /dev/null +++ b/distribution/licenses/lucene-join-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +bf4c5a17cfb265d321ef4cfb0f3d7c1a6a6651de \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 1802f859ae0..00000000000 --- a/distribution/licenses/lucene-memory-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -800442a5d7612ce4c8748831871b4d436a50554e \ No newline at end of file diff --git a/distribution/licenses/lucene-memory-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-memory-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..c5ecb0f0970 --- /dev/null +++ 
b/distribution/licenses/lucene-memory-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +2713a319d0aa696c65a32a36fda830bc482a5880 \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 1c543141bbf..00000000000 --- a/distribution/licenses/lucene-misc-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdf184de9b5773c7af3ae908af78eeb1e512470c \ No newline at end of file diff --git a/distribution/licenses/lucene-misc-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-misc-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..6d5665b5671 --- /dev/null +++ b/distribution/licenses/lucene-misc-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +88251ecdbf877c15a94d4013aa5157f5b5ce4cea \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index f3eb218b9e0..00000000000 --- a/distribution/licenses/lucene-queries-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fc59de52bd2c7e420edfd235723cb8b0dd44e92d \ No newline at end of file diff --git a/distribution/licenses/lucene-queries-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-queries-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..aa97465d97b --- /dev/null +++ b/distribution/licenses/lucene-queries-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +bf9e522244c7c4eee6c3bcc3212ff057f7b88000 \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 4ce5c2024f0..00000000000 --- a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-1d341e6a4f11f3170773ccffdbe6815b45967e3d \ No newline at end of file diff --git a/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..9942349f5a4 --- /dev/null +++ b/distribution/licenses/lucene-queryparser-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +12d71cf10a4b79231dc488af16d723dfca5ab64b \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index cf78d108a11..00000000000 --- a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a1b02c2b595ac92f45f0d2be03841a3a7fcae1f1 \ No newline at end of file diff --git a/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..f05a9b50d57 --- /dev/null +++ b/distribution/licenses/lucene-sandbox-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +f903d67d042904527a7e2e8a75c55afe36a04251 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 2634a93e82d..00000000000 --- a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e3ea422b56734329fb6974e9cf9f66478adb5793 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..8c53d4662e5 --- /dev/null +++ b/distribution/licenses/lucene-spatial-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +2f5758bbcf97048ab62d2d4ae73867d06f1ed03f \ No newline at end of file diff --git 
a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 391d044c719..00000000000 --- a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5eadbd4e63120b59ab6445e39489205f98420471 \ No newline at end of file diff --git a/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..252156ad4af --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +2cc29e4658be151658fac6e5ed7915982b6de861 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index f9f2bf5a43c..00000000000 --- a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a336287e65d082535f02a8427666dbe46b1b9b74 \ No newline at end of file diff --git a/distribution/licenses/lucene-suggest-5.5.0-snapshot-1725675.jar.sha1 b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..e7154958580 --- /dev/null +++ b/distribution/licenses/lucene-suggest-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +f490a09ca056aba42e8751a469ef114df64aae0d \ No newline at end of file diff --git a/distribution/src/main/packaging/systemd/elasticsearch.service b/distribution/src/main/packaging/systemd/elasticsearch.service index 4a280a09d38..301586c1038 100644 --- a/distribution/src/main/packaging/systemd/elasticsearch.service +++ b/distribution/src/main/packaging/systemd/elasticsearch.service @@ -26,11 +26,8 @@ ExecStart=/usr/share/elasticsearch/bin/elasticsearch \ -Des.default.path.data=${DATA_DIR} \ -Des.default.path.conf=${CONF_DIR} -# Connects 
standard output to /dev/null -StandardOutput=null - -# Connects standard error to journal -StandardError=journal +StandardOutput=journal +StandardError=inherit # Specifies the maximum file descriptor number that can be opened by this process LimitNOFILE=65535 diff --git a/distribution/src/main/resources/bin/elasticsearch.in.bat b/distribution/src/main/resources/bin/elasticsearch.in.bat index 6f6550dcdf0..7138cf5f5ca 100644 --- a/distribution/src/main/resources/bin/elasticsearch.in.bat +++ b/distribution/src/main/resources/bin/elasticsearch.in.bat @@ -93,7 +93,7 @@ set JAVA_OPTS=%JAVA_OPTS% -Djna.nosys=true REM check in case a user was using this mechanism if "%ES_CLASSPATH%" == "" ( -set ES_CLASSPATH=%ES_HOME%/lib/elasticsearch-${project.version}.jar;%ES_HOME%/lib/* +set ES_CLASSPATH=!ES_HOME!/lib/elasticsearch-${project.version}.jar;!ES_HOME!/lib/* ) else ( ECHO Error: Don't modify the classpath with ES_CLASSPATH, Best is to add 1>&2 ECHO additional elements via the plugin mechanism, or if code must really be 1>&2 diff --git a/distribution/src/main/resources/bin/service.bat b/distribution/src/main/resources/bin/service.bat index 5b5fbff7522..f423bb9740f 100644 --- a/distribution/src/main/resources/bin/service.bat +++ b/distribution/src/main/resources/bin/service.bat @@ -1,5 +1,5 @@ @echo off -SETLOCAL +SETLOCAL enabledelayedexpansion TITLE Elasticsearch Service ${project.version} diff --git a/docs/java-api/docs/bulk.asciidoc b/docs/java-api/docs/bulk.asciidoc index 248326700c4..0b43b89c07c 100644 --- a/docs/java-api/docs/bulk.asciidoc +++ b/docs/java-api/docs/bulk.asciidoc @@ -127,5 +127,5 @@ bulkProcessor.close(); Both methods flush any remaining documents and disable all other scheduled flushes if they were scheduled by setting `flushInterval`. 
If concurrent requests were enabled the `awaitClose` method waits for up to the specified timeout for all bulk requests to complete then returns `true`, if the specified waiting time elapses before all bulk requests complete, -`false` is returned. The `close` method doesn't wait for any remaining bulk requests to complete and exists immediately. +`false` is returned. The `close` method doesn't wait for any remaining bulk requests to complete and exits immediately. diff --git a/docs/java-api/search.asciidoc b/docs/java-api/search.asciidoc index a8da951fa27..27ad0beac1c 100644 --- a/docs/java-api/search.asciidoc +++ b/docs/java-api/search.asciidoc @@ -65,7 +65,7 @@ do { //Handle the hit... } - scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); + scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(60000)).execute().actionGet(); } while(scrollResp.getHits().getHits().length != 0); // Zero hits mark the end of the scroll and the while loop. -------------------------------------------------- diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index 9461ba8dd53..6f63eab83ca 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -3,8 +3,6 @@ The Elasticsearch repository contains examples of: -* a https://github.com/elastic/elasticsearch/tree/master/plugins/site-example[site plugin] - for serving static HTML, JavaScript, and CSS. * a https://github.com/elastic/elasticsearch/tree/master/plugins/jvm-example[Java plugin] which contains Java code. @@ -12,20 +10,6 @@ These examples provide the bare bones needed to get started. For more information about how to write a plugin, we recommend looking at the plugins listed in this documentation for inspiration. 
-[NOTE] -.Site plugins -==================================== - -The example site plugin mentioned above contains all of the scaffolding needed -for integrating with Gradle builds. If you don't plan on using Gradle, then all -you really need in your plugin is: - -* The `plugin-descriptor.properties` file -* The `_site/` directory -* The `_site/index.html` file - -==================================== - [float] === Plugin descriptor file @@ -43,7 +27,7 @@ instance, see https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build.gradle[`/plugins/site-example/build.gradle`]. [float] -==== Mandatory elements for all plugins +==== Mandatory elements for plugins [cols="<,<,<",options="header",] @@ -56,23 +40,6 @@ https://github.com/elastic/elasticsearch/blob/master/plugins/site-example/build. |`name` |String | the plugin name -|======================================================================= - - - -[float] -==== Mandatory elements for Java plugins - - -[cols="<,<,<",options="header",] -|======================================================================= -|Element | Type | Description - -|`jvm` |Boolean | true if the `classname` class should be loaded -from jar files in the root directory of the plugin. -Note that only jar files in the root directory are added to the classpath for the plugin! -If you need other resources, package them into a resources jar. - |`classname` |String | the name of the class to load, fully-qualified. |`java.version` |String | version of java the code is built against. @@ -83,6 +50,9 @@ of nonnegative decimal integers separated by "."'s and may have leading zeros. |======================================================================= +Note that only jar files in the root directory are added to the classpath for the plugin! +If you need other resources, package them into a resources jar. 
+ [IMPORTANT] .Plugin release lifecycle ============================================== @@ -94,20 +64,6 @@ in the presence of plugins with the incorrect `elasticsearch.version`. ============================================== -[float] -==== Mandatory elements for Site plugins - - -[cols="<,<,<",options="header",] -|======================================================================= -|Element | Type | Description - -|`site` |Boolean | true to indicate contents of the `_site/` -directory in the root of the plugin should be served. - -|======================================================================= - - [float] === Testing your plugin diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index 5afff0cb89f..29ba5e49626 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -105,8 +105,7 @@ that bucketing should use a different time zone. Time zones may either be specified as an ISO 8601 UTC offset (e.g. `+01:00` or `-08:00`) or as a timezone id, an identifier used in the TZ database like -`America\Los_Angeles` (which would need to be escaped in JSON as -`"America\\Los_Angeles"`). +`America/Los_Angeles`. 
Consider the following example: diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc index 144d6f72548..bcef61d4ef7 100644 --- a/docs/reference/cluster/nodes-stats.asciidoc +++ b/docs/reference/cluster/nodes-stats.asciidoc @@ -131,9 +131,15 @@ the operating system: `os.cpu.percent`:: Recent CPU usage for the whole system, or -1 if not supported -`os.cpu.load_average`:: - Array of system load averages for the last one minute, five - minute and fifteen minutes (value of -1 indicates not supported) +`os.cpu.load_average.1m`:: + One-minute load average on the system (field is not present if + one-minute load average is not available) +`os.cpu.load_average.5m`:: + Five-minute load average on the system (field is not present if + five-minute load average is not available) +`os.cpu.load_average.15m`:: + Fifteen-minute load average on the system (field is not present if + fifteen-minute load average is not available) `os.mem.total_in_bytes`:: Total amount of physical memory in bytes diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc index cc4dffa612a..99e754df529 100644 --- a/docs/reference/cluster/reroute.asciidoc +++ b/docs/reference/cluster/reroute.asciidoc @@ -20,7 +20,7 @@ curl -XPOST 'localhost:9200/_cluster/reroute' -d '{ } }, { - "allocate" : { + "allocate_replica" : { "index" : "test", "shard" : 1, "node" : "node3" } } @@ -64,14 +64,42 @@ The commands supported are: from the primary shard by cancelling them and allowing them to be reinitialized through the standard reallocation process. -`allocate`:: - Allocate an unassigned shard to a node. Accepts the +`allocate_replica`:: + Allocate an unassigned replica shard to a node. Accepts the `index` and `shard` for index name and shard number, and `node` to - allocate the shard to. 
It also accepts `allow_primary` flag to - explicitly specify that it is allowed to explicitly allocate a primary - shard (might result in data loss). + allocate the shard to. Takes <> into account. -WARNING: The `allow_primary` parameter will force a new _empty_ primary shard -to be allocated *without any data*. If a node which has a copy of the original -primary shard (including data) rejoins the cluster later on, that data will be -deleted: the old shard copy will be replaced by the new live shard copy. +Two more commands are available that allow the allocation of a primary shard +to a node. These commands should however be used with extreme care, as primary +shard allocation is usually fully automatically handled by Elasticsearch. +Reasons why a primary shard cannot be automatically allocated include the following: + +- A new index was created but there is no node which satisfies the allocation deciders. +- An up-to-date shard copy of the data cannot be found on the current data nodes in +the cluster. To prevent data loss, the system does not automatically promote a stale +shard copy to primary. + +As a manual override, two commands to forcefully allocate primary shards +are available: + +`allocate_stale_primary`:: + Allocate a primary shard to a node that holds a stale copy. Accepts the + `index` and `shard` for index name and shard number, and `node` to + allocate the shard to. Using this command may lead to data loss + for the provided shard id. If a node which has the good copy of the + data rejoins the cluster later on, that data will be overwritten with + the data of the stale copy that was forcefully allocated with this + command. To ensure that these implications are well-understood, + this command requires the special field `accept_data_loss` to be + explicitly set to `true` for it to work. + +`allocate_empty_primary`:: + Allocate an empty primary shard to a node. 
Accepts the + `index` and `shard` for index name and shard number, and `node` to + allocate the shard to. Using this command leads to a complete loss + of all data that was indexed into this shard, if it was previously + started. If a node which has a copy of the + data rejoins the cluster later on, that data will be deleted! + To ensure that these implications are well-understood, + this command requires the special field `accept_data_loss` to be + explicitly set to `true` for it to work. diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index 3aacdc7ed10..5f79efbcc60 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -33,6 +33,7 @@ The result of the above index operation is: -------------------------------------------------- The `_shards` header provides information about the replication process of the index operation. + * `total` - Indicates to how many shard copies (primary and replica shards) the index operation should be executed on. * `successful`- Indicates the number of shard copies the index operation succeeded on. * `failures` - An array that contains replication related errors in the case an index operation failed on a replica shard. diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index df37e7876c1..a5d95ffa3fa 100644 --- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -107,6 +107,13 @@ All options but the first option need a normalization value. Type name: `DFR` +[float] +[[dfi]] +==== DFI similarity + +Similarity that implements the http://trec.nist.gov/pubs/trec21/papers/irra.web.nb.pdf[divergence from independence] +model (normalized chi-squared distance) + [float] [[ib]] ==== IB similarity. 
diff --git a/docs/reference/mapping/params/doc-values.asciidoc b/docs/reference/mapping/params/doc-values.asciidoc index a0108b899b9..81f9b6e3c64 100644 --- a/docs/reference/mapping/params/doc-values.asciidoc +++ b/docs/reference/mapping/params/doc-values.asciidoc @@ -7,7 +7,7 @@ unique sorted list of terms, and from that immediately have access to the list of documents that contain the term. Sorting, aggregations, and access to field values in scripts requires a -different data access pattern. Instead of lookup up the term and finding +different data access pattern. Instead of looking up the term and finding documents, we need to be able to look up the document and find the terms that it has in a field. diff --git a/docs/reference/mapping/params/similarity.asciidoc b/docs/reference/mapping/params/similarity.asciidoc index a3fdef1d43b..5f2245ad2f3 100644 --- a/docs/reference/mapping/params/similarity.asciidoc +++ b/docs/reference/mapping/params/similarity.asciidoc @@ -8,7 +8,7 @@ algorithm other than the default TF/IDF, such as `BM25`. Similarities are mostly useful for <> fields, especially `analyzed` string fields, but can also apply to other field types. -Custom similarites can be configured by tuning the parameters of the built-in +Custom similarities can be configured by tuning the parameters of the built-in similarities. For more details about this expert options, see the <>. diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index e13b94c7773..ed0bb47e9d4 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -199,3 +199,10 @@ phase. Instead, highlighting needs to be performed via ============================================= + +==== Limiting the number of `nested` fields + +Indexing a document with 100 nested fields actually indexes 101 documents as each nested +document is indexed as a separate document. 
To safeguard against ill-defined mappings +the number of nested fields that can be defined per index has been limited to 50. This +default limit can be changed with the index setting `index.mapping.nested_fields.limit`. diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 56c000c3d9a..395b4311ec8 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -18,6 +18,8 @@ See <> for more info. -- include::migrate_3_0.asciidoc[] +include::migrate_2_3.asciidoc[] + include::migrate_2_2.asciidoc[] include::migrate_2_1.asciidoc[] diff --git a/docs/reference/migration/migrate_2_3.asciidoc b/docs/reference/migration/migrate_2_3.asciidoc new file mode 100644 index 00000000000..0d741e2adb2 --- /dev/null +++ b/docs/reference/migration/migrate_2_3.asciidoc @@ -0,0 +1,19 @@ +[[breaking-changes-2.3]] +== Breaking changes in 2.3 + +This section discusses the changes that you need to be aware of when migrating +your application to Elasticsearch 2.3. + +* <> + +[[breaking_23_index_apis]] +=== Mappings + +==== Limit to the number of `nested` fields + +Indexing a document with 100 nested fields actually indexes 101 documents as each nested +document is indexed as a separate document. To safeguard against ill-defined mappings +the number of nested fields that can be defined per index has been limited to 50. +This default limit can be changed with the index setting `index.mapping.nested_fields.limit`. +Note that the limit is only checked when new indices are created or mappings are updated. It +will thus only affect existing pre-2.3 indices if their mapping is changed. diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index bffca162ed1..76b1ddb417e 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -15,6 +15,8 @@ your application to Elasticsearch 3.0. 
* <> * <> * <> +* <> +* <> [[breaking_30_search_changes]] === Warmers @@ -181,6 +183,12 @@ When `max_children` was set to `0` on the `has_child` query then there was no up are allowed to match. This has changed and `0` now really means to zero child documents are allowed. If no upper limit is needed then the `max_children` option shouldn't be defined at all on the `has_child` query. +==== `_parent` field no longer indexed + +The join between parent and child documents no longer relies on indexed fields and therefore from `3.0.0` onwards +the `_parent` indexed field won't be indexed. In order to find documents that refer to a specific parent id +the new `parent_id` query can be used. The get response and hits inside the search response continue to include +the parent id under the `_parent` key. [[breaking_30_settings_changes]] === Settings changes @@ -560,30 +568,30 @@ and high risk of being misused. The ability to change the thread pool type for a that it is still possible to adjust relevant thread pool parameters for each of the thread pools (e.g., depending on the thread pool type, `keep_alive`, `queue_size`, etc.). +[[breaking_30_cpu_stats]] === System CPU stats The recent CPU usage (as a percent) has been added to the OS stats reported under the node stats API and the cat nodes API. The breaking -change here is that there is a new object in the "os" object in the node -stats response. This object is called "cpu" and includes "percent" and -"load_average" as fields. This moves the "load_average" field that was -previously a top-level field in the "os" object to the "cpu" object. The -format of the "load_average" field has changed to an array of length -three representing the one-minute, five-minute and fifteen-minute load -averages (a value of -1 for any of array components indicates that the -corresponding metric is not available). +change here is that there is a new object in the `os` object in the node +stats response.
This object is called `cpu` and includes "percent" and +`load_average` as fields. This moves the `load_average` field that was +previously a top-level field in the `os` object to the `cpu` object. The +format of the `load_average` field has changed to an object with fields +`1m`, `5m`, and `15m` representing the one-minute, five-minute and +fifteen-minute loads respectively. If any of these fields are not present, +it indicates that the corresponding value is not available. -In the cat nodes API response, the "cpu" field is output by default. The -previous "load" field has been removed and is replaced by "load_1m", -"load_5m", and "load_15m" which represent the one-minute, five-minute -and fifteen-minute loads respectively. These values are output by -default, and a value of -1 indicates that the corresponding metric is -not available. +In the cat nodes API response, the `cpu` field is output by default. The +previous `load` field has been removed and is replaced by `load_1m`, +`load_5m`, and `load_15m` which represent the one-minute, five-minute +and fifteen-minute loads respectively. The field will be null if the +corresponding value is not available. -Finally, the API for org.elasticsearch.monitor.os.OsStats has +Finally, the API for `org.elasticsearch.monitor.os.OsStats` has changed. The `getLoadAverage` method has been removed. The value for this can now be obtained from `OsStats.Cpu#getLoadAverage` but it is no -longer a double and is instead an object encapuslating the one-minute, +longer a double and is instead an object encapsulating the one-minute, five-minute and fifteen-minute load averages. Additionally, the recent CPU usage can be obtained from `OsStats.Cpu#getPercent`. @@ -603,6 +611,13 @@ Allocation IDs assign unique identifiers to shard copies. This allows the cluste copies of the same data and track which shards have been active, so that after a cluster restart, shard copies containing only the most recent data can become primaries. 
+=== Reroute commands + +The reroute command `allocate` has been split into two distinct commands `allocate_replica` and `allocate_empty_primary`. +This was done as we introduced a new `allocate_stale_primary` command. The new `allocate_replica` command corresponds to the +old `allocate` command with `allow_primary` set to false. The new `allocate_empty_primary` command corresponds to the old +`allocate` command with `allow_primary` set to true. + ==== `index.shared_filesystem.recover_on_any_node` changes The behavior of `index.shared_filesystem.recover_on_any_node = true` has been changed. Previously, in the case where no @@ -612,6 +627,7 @@ in the case where shard copies can be found. Previously, a node not holding the holding shard copies were satisfying the allocation deciders. Now, the shard will be assigned to a node having a shard copy, even if none of the nodes holding a shard copy satisfy the allocation deciders. +[[breaking_30_percolator]] === Percolator Adding percolator queries and modifications to existing percolator queries are no longer visible in immediately @@ -627,3 +643,19 @@ The percolate api can no longer accept documents that have fields that don't exi When percolating an existing document then specifying a document in the source of the percolate request is not allowed any more. + +Percolator documents are no longer excluded from the search response. + +[[breaking_30_packaging]] +=== Packaging + +==== Default logging using systemd (since Elasticsearch 2.2.0) + +In previous versions of Elasticsearch, the default logging +configuration routed standard output to /dev/null and standard error to +the journal. However, there are often critical error messages at +startup that are logged to standard output rather than standard error +and these error messages would be lost to the nether. The default has +changed to now route standard output to the journal and standard error +to inherit this setting (these are the defaults for systemd). 
These +settings can be modified by editing the elasticsearch.service file. diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 08e5e575f20..39fdae80242 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -300,9 +300,9 @@ location field. You want to compute a decay function depending on how far the hotel is from a given location. You might not immediately see what scale to choose for the gauss function, but you can say something like: "At a distance of 2km from the desired location, the score should -be reduced by one third." +be reduced to one third." The parameter "scale" will then be adjusted automatically to assure that -the score function computes a score of 0.5 for hotels that are 2km away +the score function computes a score of 0.33 for hotels that are 2km away from the desired location. diff --git a/docs/reference/query-dsl/joining-queries.asciidoc b/docs/reference/query-dsl/joining-queries.asciidoc index ae66db8ce81..cfbbf5360b6 100644 --- a/docs/reference/query-dsl/joining-queries.asciidoc +++ b/docs/reference/query-dsl/joining-queries.asciidoc @@ -29,4 +29,6 @@ include::has-child-query.asciidoc[] include::has-parent-query.asciidoc[] +include::parent-id-query.asciidoc[] + diff --git a/docs/reference/query-dsl/parent-id-query.asciidoc b/docs/reference/query-dsl/parent-id-query.asciidoc new file mode 100644 index 00000000000..7386325a766 --- /dev/null +++ b/docs/reference/query-dsl/parent-id-query.asciidoc @@ -0,0 +1,31 @@ +[[query-dsl-parent-id-query]] +=== Parent Id Query + +added[3.0.0] + +The `parent_id` query can be used to find a child document pointing to a particular parent id. + +The actual underlying Lucene field that is used to store to what parent id a child document is referring to +is determined by the child type's `_parent` field. 
This query helps by selecting the right field based +on the specified child type. Example: + +[source,js] +-------------------------------------------------- +{ + "parent_id" : { + "type" : "blog_tag", + "id" : "1" + } +} +-------------------------------------------------- + +==== Parameters + +This query has two required parameters: + +[horizontal] +`type`:: +The child type. This must be a type with `_parent` field. + +`id`:: +The required parent id that selected documents must refer to. \ No newline at end of file diff --git a/docs/reference/search/suggesters/context-suggest.asciidoc b/docs/reference/search/suggesters/context-suggest.asciidoc index a492c37b5b4..ddec2bb193d 100644 --- a/docs/reference/search/suggesters/context-suggest.asciidoc +++ b/docs/reference/search/suggesters/context-suggest.asciidoc @@ -99,7 +99,7 @@ PUT place/shops/1 ... "suggest": { "input": ["timmy's", "starbucks", "dunkin donuts"], - "context": { + "contexts": { "place_type": ["cafe", "food"] <1> } } @@ -273,7 +273,7 @@ PUT place/shops/1 { "suggest": { "input": "timmy's", - "context": [ + "contexts": [ "location": [ { "lat": 43.6624803, "lon": -79.380 diff --git a/docs/reference/search/suggesters/term-suggest.asciidoc b/docs/reference/search/suggesters/term-suggest.asciidoc index 55fce633481..965a487e293 100644 --- a/docs/reference/search/suggesters/term-suggest.asciidoc +++ b/docs/reference/search/suggesters/term-suggest.asciidoc @@ -9,70 +9,70 @@ suggest text is analyzed before terms are suggested. The suggested terms are provided per analyzed suggest text token. The `term` suggester doesn't take the query into account that is part of request. -==== Common suggest options: +==== Common suggest options: [horizontal] -`text`:: +`text`:: The suggest text. The suggest text is a required option that needs to be set globally or per suggestion.
-`field`:: +`field`:: The field to fetch the candidate suggestions from. This is an required option that either needs to be set globally or per - suggestion. + suggestion. -`analyzer`:: +`analyzer`:: The analyzer to analyse the suggest text with. Defaults - to the search analyzer of the suggest field. + to the search analyzer of the suggest field. -`size`:: +`size`:: The maximum corrections to be returned per suggest text - token. + token. -`sort`:: +`sort`:: Defines how suggestions should be sorted per suggest text term. Two possible values: + - ** `score`: Sort by score first, then document frequency and - then the term itself. + ** `score`: Sort by score first, then document frequency and + then the term itself. ** `frequency`: Sort by document frequency first, then similarity - score and then the term itself. + score and then the term itself. + -`suggest_mode`:: +`suggest_mode`:: The suggest mode controls what suggestions are included or controls for what suggest text terms, suggestions should be - suggested. Three possible values can be specified: -+ + suggested. Three possible values can be specified: ++ ** `missing`: Only provide suggestions for suggest text terms that are - not in the index. This is the default. + not in the index. This is the default. ** `popular`: Only suggest suggestions that occur in more docs then - the original suggest text term. + the original suggest text term. ** `always`: Suggest any matching suggestions based on terms in the suggest text. -==== Other term suggest options: +==== Other term suggest options: [horizontal] -`lowercase_terms`:: - Lower cases the suggest text terms after text analysis. +`lowercase_terms`:: + Lower cases the suggest text terms after text analysis. -`max_edits`:: +`max_edits`:: The maximum edit distance candidate suggestions can have in order to be considered as a suggestion. Can only be a value between 1 and 2. Any other value result in an bad request error being - thrown. Defaults to 2. + thrown. 
Defaults to 2. -`prefix_length`:: +`prefix_length`:: The number of minimal prefix characters that must match in order be a candidate suggestions. Defaults to 1. Increasing this number improves spellcheck performance. Usually misspellings don't - occur in the beginning of terms. (Old name "prefix_len" is deprecated) + occur in the beginning of terms. (Old name "prefix_len" is deprecated) -`min_word_length`:: +`min_word_length`:: The minimum length a suggest text term must have in order to be included. Defaults to 4. (Old name "min_word_len" is deprecated) -`shard_size`:: +`shard_size`:: Sets the maximum number of suggestions to be retrieved from each individual shard. During the reduce phase only the top N suggestions are returned based on the `size` option. Defaults to the @@ -81,24 +81,24 @@ doesn't take the query into account that is part of request. corrections at the cost of performance. Due to the fact that terms are partitioned amongst shards, the shard level document frequencies of spelling corrections may not be precise. Increasing this will make these - document frequencies more precise. + document frequencies more precise. -`max_inspections`:: +`max_inspections`:: A factor that is used to multiply with the `shards_size` in order to inspect more candidate spell corrections on the shard level. Can improve accuracy at the cost of performance. - Defaults to 5. + Defaults to 5. -`min_doc_freq`:: +`min_doc_freq`:: The minimal threshold in number of documents a suggestion should appear in. This can be specified as an absolute number or as a relative percentage of number of documents. This can improve quality by only suggesting high frequency terms. Defaults to 0f and is not enabled. If a value higher than 1 is specified then the number cannot be fractional. The shard level document frequencies are used for - this option. + this option. 
-`max_term_freq`:: +`max_term_freq`:: The maximum threshold in number of documents a suggest text token can exist in order to be included. Can be a relative percentage number (e.g 0.4) or an absolute number to represent document @@ -108,3 +108,15 @@ doesn't take the query into account that is part of request. usually spelled correctly on top of this also improves the spellcheck performance. The shard level document frequencies are used for this option. + +`string_distance`:: + Which string distance implementation to use for comparing how similar + suggested terms are. Five possible values can be specified: + `internal` - The default based on damerau_levenshtein but highly optimized + for comparing string distance for terms inside the index. + `damerau_levenshtein` - String distance algorithm based on + Damerau-Levenshtein algorithm. + `levenstein` - String distance algorithm based on Levenstein edit distance + algorithm. + `jarowinkler` - String distance algorithm based on Jaro-Winkler algorithm. + `ngram` - String distance algorithm based on character n-grams. diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index 07688919e8a..2332e973dee 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -44,7 +44,7 @@ using the <> API, with: [source,js] -------------------------------------------------- -curl localhost:9200/_nodes/process?pretty +curl localhost:9200/_nodes/stats/process?pretty -------------------------------------------------- [float] diff --git a/docs/reference/setup/upgrade.asciidoc b/docs/reference/setup/upgrade.asciidoc index 894f82a6db5..05b63588252 100644 --- a/docs/reference/setup/upgrade.asciidoc +++ b/docs/reference/setup/upgrade.asciidoc @@ -27,7 +27,7 @@ consult this table: |2.x |3.x |<> |======================================================================= -TIP: Take plugins into consideration as well when upgrading.
Most plugins will have to be upgraded alongside Elasticsearch, although some plugins accessed primarily through the browser (`_site` plugins) may continue to work given that API changes are compatible. +TIP: Take plugins into consideration as well when upgrading. Plugins must be upgraded alongside Elasticsearch. include::backup.asciidoc[] diff --git a/modules/build.gradle b/modules/build.gradle index 41f7a8873b4..4b88dfd703f 100644 --- a/modules/build.gradle +++ b/modules/build.gradle @@ -39,8 +39,5 @@ subprojects { if (esplugin.isolated == false) { throw new InvalidModelException("Modules cannot disable isolation") } - if (esplugin.jvm == false) { - throw new InvalidModelException("Modules must be jvm plugins") - } } } diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index a5332a9ca09..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60e056d2dd04a81440482b047af0737bc41593d9 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1725675.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..86909f14f82 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +31db8e49e4089772eae8ab2db0ac59bab6fbcd2f \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java index d4ae9e14140..3f8bbf7ad13 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java +++ 
b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionExecutableScript.java @@ -19,14 +19,14 @@ package org.elasticsearch.script.expression; +import java.util.HashMap; +import java.util.Map; + import org.apache.lucene.expressions.Expression; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptException; -import java.util.HashMap; -import java.util.Map; - /** * A bridge to evaluate an {@link Expression} against a map of variables in the context * of an {@link ExecutableScript}. @@ -90,9 +90,4 @@ public class ExpressionExecutableScript implements ExecutableScript { throw new ScriptException("Error evaluating " + compiledScript, exception); } } - - @Override - public Object unwrap(Object value) { - return value; - } } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java index 4926ceb0290..0f56adeea55 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java @@ -19,6 +19,10 @@ package org.elasticsearch.script.expression; +import java.io.IOException; +import java.util.Collections; +import java.util.Map; + import org.apache.lucene.expressions.Bindings; import org.apache.lucene.expressions.Expression; import org.apache.lucene.expressions.SimpleBindings; @@ -32,10 +36,6 @@ import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - /** * A bridge to evaluate an {@link Expression} against {@link Bindings} in the context * of a {@link SearchScript}. 
@@ -89,9 +89,6 @@ class ExpressionSearchScript implements SearchScript { @Override public double runAsDouble() { return evaluate(); } - @Override - public Object unwrap(Object value) { return value; } - @Override public void setDocument(int d) { docid = d; diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java index 8dcefce1478..3da22bf8a43 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.script.expression; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class ExpressionRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(ExpressionPlugin.class); - } - public ExpressionRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml index 1550f2a7f81..cc777bd826b 100644 --- a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml +++ b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: lang-expression } - - match: { nodes.$master.modules.0.jvm: 
true } diff --git a/modules/lang-groovy/build.gradle b/modules/lang-groovy/build.gradle index 73ad6043f37..6f9b043d8bc 100644 --- a/modules/lang-groovy/build.gradle +++ b/modules/lang-groovy/build.gradle @@ -26,9 +26,6 @@ dependencies { compile 'org.codehaus.groovy:groovy:2.4.4:indy' } -compileJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked,-cast' -compileTestJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked,-cast' - integTest { cluster { systemProperty 'es.script.inline', 'on' diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 98ed5695973..da7ff0f2eb2 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -19,10 +19,15 @@ package org.elasticsearch.script.groovy; -import groovy.lang.Binding; -import groovy.lang.GroovyClassLoader; -import groovy.lang.GroovyCodeSource; -import groovy.lang.Script; +import java.io.IOException; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.HashMap; +import java.util.Map; + import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorer; import org.codehaus.groovy.ast.ClassCodeExpressionTransformer; @@ -55,14 +60,10 @@ import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; -import java.io.IOException; -import java.math.BigDecimal; -import java.nio.charset.StandardCharsets; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedAction; -import 
java.util.HashMap; -import java.util.Map; +import groovy.lang.Binding; +import groovy.lang.GroovyClassLoader; +import groovy.lang.GroovyCodeSource; +import groovy.lang.Script; /** * Provides the infrastructure for Groovy as a scripting language for Elasticsearch @@ -182,6 +183,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri String fake = MessageDigests.toHexString(MessageDigests.sha1().digest(script.getBytes(StandardCharsets.UTF_8))); // same logic as GroovyClassLoader.parseClass() but with a different codesource string: return AccessController.doPrivileged(new PrivilegedAction() { + @Override public Class run() { GroovyCodeSource gcs = new GroovyCodeSource(script, fake, BootstrapInfo.UNTRUSTED_CODEBASE); gcs.setCachable(false); @@ -203,7 +205,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri */ @SuppressWarnings("unchecked") private Script createScript(Object compiledScript, Map vars) throws InstantiationException, IllegalAccessException { - Class scriptClass = (Class) compiledScript; + Class scriptClass = (Class) compiledScript; Script scriptObject = (Script) scriptClass.newInstance(); Binding binding = new Binding(); binding.getVariables().putAll(vars); @@ -211,7 +213,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri return scriptObject; } - @SuppressWarnings({"unchecked"}) @Override public ExecutableScript executable(CompiledScript compiledScript, Map vars) { try { @@ -225,7 +226,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } } - @SuppressWarnings({"unchecked"}) @Override public SearchScript search(final CompiledScript compiledScript, final SearchLookup lookup, @Nullable final Map vars) { return new SearchScript() { @@ -288,7 +288,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri } } - @SuppressWarnings({"unchecked"}) @Override public void setNextVar(String name, Object value) 
{ variables.put(name, value); @@ -334,12 +333,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri public double runAsDouble() { return ((Number) run()).doubleValue(); } - - @Override - public Object unwrap(Object value) { - return value; - } - } /** diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 9b2e0041462..fc19a956ff1 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -23,7 +23,6 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; @@ -35,14 +34,11 @@ import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.lucene.search.function.CombineFunction; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -59,6 +55,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.script.groovy.GroovyScriptEngineService; +import org.elasticsearch.test.ActionRecordingPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; @@ -66,17 +63,13 @@ import org.elasticsearch.test.rest.client.http.HttpResponse; import org.junit.After; import org.junit.Before; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Locale; -import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.node.Node.HTTP_ENABLED; import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -89,7 +82,6 @@ import static org.hamcrest.Matchers.is; @ClusterScope(scope = SUITE) public class ContextAndHeaderTransportTests extends ESIntegTestCase { - private static final List requests = new CopyOnWriteArrayList<>(); private String randomHeaderKey = randomAsciiOfLength(10); private String randomHeaderValue = randomAsciiOfLength(20); private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -100,13 +92,13 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("script.indexed", "on") - 
.put(HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } @Override protected Collection> nodePlugins() { - return pluginList(ActionLoggingPlugin.class, GroovyPlugin.class); + return pluginList(ActionRecordingPlugin.class, GroovyPlugin.class); } @Before @@ -128,7 +120,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { .setSettings(settings).addMapping("type", mapping)); ensureGreen(queryIndex, lookupIndex); - requests.clear(); + ActionRecordingPlugin.clear(); } @After @@ -193,7 +185,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { .get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1); - assertThat(requests, hasSize(greaterThan(0))); + assertThat(ActionRecordingPlugin.allRequests(), hasSize(greaterThan(0))); assertGetRequestsContainHeaders(); } @@ -281,7 +273,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { .execute(); assertThat(response, hasStatus(OK)); - List searchRequests = getRequests(SearchRequest.class); + List searchRequests = ActionRecordingPlugin.requestsOfType(SearchRequest.class); assertThat(searchRequests, hasSize(greaterThan(0))); for (SearchRequest searchRequest : searchRequests) { assertThat(searchRequest.hasHeader(releventHeaderName), is(true)); @@ -290,20 +282,9 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } } - private List getRequests(Class clazz) { - List results = new ArrayList<>(); - for (ActionRequest request : requests) { - if (request.getClass().equals(clazz)) { - results.add((T) request); - } - } - - return results; - } - - private void assertRequestsContainHeader(Class clazz) { - List classRequests = getRequests(clazz); - for (ActionRequest request : classRequests) { + private void assertRequestsContainHeader(Class> clazz) { + List> classRequests = ActionRecordingPlugin.requestsOfType(clazz); + for (ActionRequest request : classRequests) { assertRequestContainsHeader(request); } } @@ 
-313,7 +294,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } private void assertGetRequestsContainHeaders(String index) { - List getRequests = getRequests(GetRequest.class); + List getRequests = ActionRecordingPlugin.requestsOfType(GetRequest.class); assertThat(getRequests, hasSize(greaterThan(0))); for (GetRequest request : getRequests) { @@ -324,7 +305,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } } - private void assertRequestContainsHeader(ActionRequest request) { + private void assertRequestContainsHeader(ActionRequest request) { String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; @@ -342,7 +323,9 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { Client transportClient = internalCluster().transportClient(); FilterClient filterClient = new FilterClient(transportClient) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, + ActionListener listener) { request.putHeader(randomHeaderKey, randomHeaderValue); super.doExecute(action, request, listener); } @@ -350,58 +333,4 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return filterClient; } - - public static class ActionLoggingPlugin extends Plugin { - - @Override - public String name() { - return "test-action-logging"; - } - - @Override - public String description() { - return "Test action logging"; - } - - @Override - public Collection nodeModules() { - return Collections.singletonList(new ActionLoggingModule()); - } - - public void onModule(ActionModule module) { - module.registerFilter(LoggingFilter.class); - } - } - - public static class 
ActionLoggingModule extends AbstractModule { - @Override - protected void configure() { - bind(LoggingFilter.class).asEagerSingleton(); - } - - } - - public static class LoggingFilter extends ActionFilter.Simple { - - @Inject - public LoggingFilter(Settings settings) { - super(settings); - } - - @Override - public int order() { - return 999; - } - - @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { - requests.add(request); - return true; - } - - @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { - return true; - } - } } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java index d79beb186cf..f101303ee82 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java @@ -40,13 +40,13 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -72,7 +72,7 @@ import static org.hamcrest.Matchers.equalTo; public class GeoDistanceTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); } public void testSimpleDistance() throws Exception { diff --git 
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index a8f78c62c77..9b3e1a342a8 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -65,7 +66,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { return Collections.singleton(GroovyPlugin.class); } - private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); + private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true); private static int cardinality; @@ -77,7 +78,6 @@ public class MinDocCountTests extends AbstractTermsTestCase { final List indexRequests = new ArrayList<>(); final Set stringTerms = new HashSet<>(); final LongSet longTerms = new LongHashSet(); - final Set dateTerms = new HashSet<>(); for (int i = 0; i < cardinality; ++i) { String stringTerm; do { @@ -319,7 +319,6 @@ public class MinDocCountTests extends AbstractTermsTestCase { throw ae; } } - } public void testHistogramCountAsc() throws Exception { @@ -372,11 +371,9 @@ public class MinDocCountTests extends AbstractTermsTestCase { .execute().actionGet(); assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); } - } private void testMinDocCountOnDateHistogram(Histogram.Order order) throws Exception { - final int interval = randomIntBetween(1, 3); final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type") 
.setSize(0) .setQuery(QUERY) @@ -393,7 +390,5 @@ public class MinDocCountTests extends AbstractTermsTestCase { .execute().actionGet(); assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); } - } - } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java index f3574982377..2c1d5256379 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptQuerySearchTests.java @@ -53,8 +53,8 @@ public class ScriptQuerySearchTests extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)) // aggressive filter caching so that we can assert on the number of iterations of the script filters - .put(IndexModule.QUERY_CACHE_TYPE, IndexModule.INDEX_QUERY_CACHE) - .put(IndexModule.QUERY_CACHE_EVERYTHING, true) + .put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), IndexModule.INDEX_QUERY_CACHE) + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true) .build(); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java index 98d53c85174..fb57c545c11 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ScriptedMetricTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import 
org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; @@ -123,7 +124,7 @@ public class ScriptedMetricTests extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings settings = Settings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", getDataPath("/org/elasticsearch/messy/tests/conf")) + .put(Environment.PATH_CONF_SETTING.getKey(), getDataPath("/org/elasticsearch/messy/tests/conf")) .build(); return settings; } @@ -147,15 +148,15 @@ public class ScriptedMetricTests extends ESIntegTestCase { for (Object object : aggregationList) { assertThat(object, notNullValue()); assertThat(object, instanceOf(Map.class)); - Map map = (Map) object; + Map map = (Map) object; assertThat(map.size(), lessThanOrEqualTo(1)); if (map.size() == 1) { - assertThat(map.get("count"), notNullValue()); - assertThat(map.get("count"), instanceOf(Number.class)); - assertThat((Number) map.get("count"), equalTo((Number) 1)); + assertThat(map.get("count"), notNullValue()); + assertThat(map.get("count"), instanceOf(Number.class)); + assertThat((Number) map.get("count"), equalTo((Number) 1)); numShardsRun++; + } } - } // We don't know how many shards will have documents but we need to make // sure that at least one shard ran the map script assertThat(numShardsRun, greaterThan(0)); @@ -740,6 +741,7 @@ public class ScriptedMetricTests extends ESIntegTestCase { assertThat(scriptedMetric.getName(), equalTo("scripted")); assertThat(scriptedMetric.aggregation(), notNullValue()); assertThat(scriptedMetric.aggregation(), instanceOf(List.class)); + @SuppressWarnings("unchecked") // We'll just get a ClassCastException a couple lines down if we're wrong, its ok. 
List aggregationResult = (List) scriptedMetric.aggregation(); assertThat(aggregationResult.size(), equalTo(1)); assertThat(aggregationResult.get(0), equalTo(0)); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 5a56e0f6999..b78c1c264c3 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -339,9 +339,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .execute().actionGet(); client().admin().indices().refresh(refreshRequest()).actionGet(); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) -.addScriptField("s_obj1", new Script("_source.obj1")) + SearchResponse response = client().prepareSearch().setQuery(matchAllQuery()).addScriptField("s_obj1", new Script("_source.obj1")) .addScriptField("s_obj1_test", new Script("_source.obj1.test")).addScriptField("s_obj2", new Script("_source.obj2")) .addScriptField("s_obj2_arr2", new Script("_source.obj2.arr2")).addScriptField("s_arr3", new Script("_source.arr3")) .execute().actionGet(); @@ -355,7 +353,7 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(response.getHits().getAt(0).field("s_obj1_test").value().toString(), equalTo("something")); Map sObj2 = response.getHits().getAt(0).field("s_obj2").value(); - List sObj2Arr2 = (List) sObj2.get("arr2"); + List sObj2Arr2 = (List) sObj2.get("arr2"); assertThat(sObj2Arr2.size(), equalTo(2)); assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1")); assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2")); @@ -365,8 +363,8 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(sObj2Arr2.get(0).toString(), equalTo("arr_value1")); assertThat(sObj2Arr2.get(1).toString(), equalTo("arr_value2")); - List sObj2Arr3 = 
response.getHits().getAt(0).field("s_arr3").values(); - assertThat(((Map) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1")); + List sObj2Arr3 = response.getHits().getAt(0).field("s_arr3").values(); + assertThat(((Map) sObj2Arr3.get(0)).get("arr3_field1").toString(), equalTo("arr3_value1")); } public void testPartialFields() throws Exception { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java index 8d959022412..bbc2d0789de 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SimpleSortTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.search.sort.ScriptSortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; import org.hamcrest.Matchers; @@ -105,7 +106,7 @@ import static org.hamcrest.Matchers.nullValue; public class SimpleSortTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return pluginList(GroovyPlugin.class, InternalSettingsPlugin.class); } @TestLogging("action.search.type:TRACE") diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java index b96436dd77f..b73ec250daf 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.script.groovy; import 
com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class GroovyRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(GroovyPlugin.class); - } - public GroovyRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml index c276bab6495..d5044bbe422 100644 --- a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml +++ b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: lang-groovy } - - match: { nodes.$master.modules.0.jvm: true } diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index 4e8e9cc345d..694ddc74606 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -26,8 +26,6 @@ dependencies { compile "com.github.spullara.mustache.java:compiler:0.9.1" } -compileTestJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked' - integTest { cluster { systemProperty 'es.script.inline', 'on' diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java new file mode 100644 index 00000000000..30983395d93 --- /dev/null +++ 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomReflectionObjectHandler.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.mustache; + +import com.github.mustachejava.reflect.ReflectionObjectHandler; +import org.elasticsearch.common.util.iterable.Iterables; + +import java.lang.reflect.Array; +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Set; +import java.util.Iterator; +import java.util.Map; +import java.util.HashMap; + +final class CustomReflectionObjectHandler extends ReflectionObjectHandler { + + @Override + public Object coerce(Object object) { + if (object == null) { + return null; + } + + if (object.getClass().isArray()) { + return new ArrayMap(object); + } else if (object instanceof Collection) { + @SuppressWarnings("unchecked") + Collection collection = (Collection) object; + return new CollectionMap(collection); + } else { + return super.coerce(object); + } + } + + final static class ArrayMap extends AbstractMap implements Iterable { + + private final Object array; + private final int length; + + public ArrayMap(Object array) { + this.array = array; + this.length = Array.getLength(array); + } + + 
@Override + public Object get(Object key) { + if (key instanceof Number) { + return Array.get(array, ((Number) key).intValue()); + } + try { + int index = Integer.parseInt(key.toString()); + return Array.get(array, index); + } catch (NumberFormatException nfe) { + // if it's not a number it is as if the key doesn't exist + return null; + } + } + + @Override + public boolean containsKey(Object key) { + return get(key) != null; + } + + @Override + public Set> entrySet() { + Map map = new HashMap<>(length); + for (int i = 0; i < length; i++) { + map.put(i, Array.get(array, i)); + } + return map.entrySet(); + } + + @Override + public Iterator iterator() { + return new Iterator() { + + int index = 0; + + @Override + public boolean hasNext() { + return index < length; + } + + @Override + public Object next() { + return Array.get(array, index++); + } + }; + } + + } + + final static class CollectionMap extends AbstractMap implements Iterable { + + private final Collection col; + + public CollectionMap(Collection col) { + this.col = col; + } + + @Override + public Object get(Object key) { + if (key instanceof Number) { + return Iterables.get(col, ((Number) key).intValue()); + } + try { + int index = Integer.parseInt(key.toString()); + return Iterables.get(col, index); + } catch (NumberFormatException nfe) { + // if it's not a number it is as if the key doesn't exist + return null; + } + } + + @Override + public boolean containsKey(Object key) { + return get(key) != null; + } + + @Override + public Set> entrySet() { + Map map = new HashMap<>(col.size()); + int i = 0; + for (Object item : col) { + map.put(i++, item); + } + return map.entrySet(); + } + + @Override + public Iterator iterator() { + return col.iterator(); + } + } + +} diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java index 
7734d0334bf..38d48b98f4e 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/JsonEscapingMustacheFactory.java @@ -28,13 +28,12 @@ import java.io.Writer; /** * A MustacheFactory that does simple JSON escaping. */ -public final class JsonEscapingMustacheFactory extends DefaultMustacheFactory { - +final class JsonEscapingMustacheFactory extends DefaultMustacheFactory { + @Override public void encode(String value, Writer writer) { try { - JsonStringEncoder utils = new JsonStringEncoder(); - writer.write(utils.quoteAsString(value));; + writer.write(JsonStringEncoder.getInstance().quoteAsString(value)); } catch (IOException e) { throw new MustacheException("Failed to encode value: " + value); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java index f43856cb7b5..685ba6a4aae 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngineService.java @@ -18,7 +18,13 @@ */ package org.elasticsearch.script.mustache; -import com.github.mustachejava.Mustache; +import java.lang.ref.SoftReference; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Collections; +import java.io.Reader; +import java.util.Map; + import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; @@ -34,11 +40,8 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; -import java.lang.ref.SoftReference; -import 
java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Collections; -import java.util.Map; +import com.github.mustachejava.Mustache; +import com.github.mustachejava.DefaultMustacheFactory; /** * Main entry point handling template registration, compilation and @@ -48,9 +51,12 @@ import java.util.Map; * process: First compile the string representing the template, the resulting * {@link Mustache} object can then be re-used for subsequent executions. */ -public class MustacheScriptEngineService extends AbstractComponent implements ScriptEngineService { +public final class MustacheScriptEngineService extends AbstractComponent implements ScriptEngineService { public static final String NAME = "mustache"; + static final String CONTENT_TYPE_PARAM = "content_type"; + static final String JSON_CONTENT_TYPE = "application/json"; + static final String PLAIN_TEXT_CONTENT_TYPE = "text/plain"; /** Thread local UTF8StreamWriter to store template execution results in, thread local to save object creation.*/ private static ThreadLocal> utf8StreamWriter = new ThreadLocal<>(); @@ -85,8 +91,21 @@ public class MustacheScriptEngineService extends AbstractComponent implements Sc * */ @Override public Object compile(String template, Map params) { - /** Factory to generate Mustache objects from. 
*/ - return (new JsonEscapingMustacheFactory()).compile(new FastStringReader(template), "query-template"); + String contentType = params.getOrDefault(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE); + final DefaultMustacheFactory mustacheFactory; + switch (contentType){ + case PLAIN_TEXT_CONTENT_TYPE: + mustacheFactory = new NoneEscapingMustacheFactory(); + break; + case JSON_CONTENT_TYPE: + default: + // assume that the default is json encoding: + mustacheFactory = new JsonEscapingMustacheFactory(); + break; + } + mustacheFactory.setObjectHandler(new CustomReflectionObjectHandler()); + Reader reader = new FastStringReader(template); + return mustacheFactory.compile(reader, "query-template"); } @Override @@ -174,10 +193,5 @@ public class MustacheScriptEngineService extends AbstractComponent implements Sc } return result.bytes(); } - - @Override - public Object unwrap(Object value) { - return value; - } } } diff --git a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogSizeRecoveryIT.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java similarity index 60% rename from core/src/test/java/org/elasticsearch/recovery/SmallTranslogSizeRecoveryIT.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java index a39849e0f1c..3539402df98 100644 --- a/core/src/test/java/org/elasticsearch/recovery/SmallTranslogSizeRecoveryIT.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/NoneEscapingMustacheFactory.java @@ -16,18 +16,25 @@ * specific language governing permissions and limitations * under the License. 
*/ +package org.elasticsearch.script.mustache; -package org.elasticsearch.recovery; +import com.github.mustachejava.DefaultMustacheFactory; +import com.github.mustachejava.MustacheException; -import org.elasticsearch.common.settings.Settings; +import java.io.IOException; +import java.io.Writer; /** - * + * A MustacheFactory that does no string escaping. */ -public class SmallTranslogSizeRecoveryIT extends SimpleRecoveryIT { +final class NoneEscapingMustacheFactory extends DefaultMustacheFactory { @Override - protected Settings recoverySettings() { - return Settings.settingsBuilder().put("index.shard.recovery.translog_size", "3b").build(); + public void encode(String value, Writer writer) { + try { + writer.write(value); + } catch (IOException e) { + throw new MustacheException("Failed to encode value: " + value); + } } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java index 92d15332780..d1275f63141 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.messy.tests; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionModule; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; @@ -29,17 +28,13 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexRequestBuilder; import 
org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest; import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.client.Client; import org.elasticsearch.client.FilterClient; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -52,6 +47,7 @@ import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import org.elasticsearch.test.ActionRecordingPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.junit.After; @@ -60,17 +56,14 @@ import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.node.Node.HTTP_ENABLED; import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -84,7 +77,6 @@ import static org.hamcrest.Matchers.is; @ClusterScope(scope = SUITE) public class ContextAndHeaderTransportTests extends ESIntegTestCase { - private static final List requests = new CopyOnWriteArrayList<>(); private String randomHeaderKey = randomAsciiOfLength(10); private String randomHeaderValue = randomAsciiOfLength(20); private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -95,13 +87,13 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put("script.indexed", "on") - .put(HTTP_ENABLED, true) + .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } @Override protected Collection> nodePlugins() { - return pluginList(ActionLoggingPlugin.class, MustachePlugin.class); + return pluginList(ActionRecordingPlugin.class, MustachePlugin.class); } @Before @@ -122,14 +114,13 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { assertAcked(transportClient().admin().indices().prepareCreate(queryIndex) .setSettings(settings).addMapping("type", mapping)); ensureGreen(queryIndex, lookupIndex); - - requests.clear(); } @After public void checkAllRequestsContainHeaders() { assertRequestsContainHeader(IndexRequest.class); assertRequestsContainHeader(RefreshRequest.class); + ActionRecordingPlugin.clear(); } public void testThatIndexedScriptGetRequestInTemplateQueryContainsContextAndHeaders() throws Exception { @@ -216,7 +207,6 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { titles.add("Representative Government"); titles.add("Election"); - List builders = new ArrayList<>(); for (String title: titles) { transportClient().prepareIndex("test", "type1").setSource("title", title).get(); } @@ -272,30 +262,15 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { 
assertRequestsContainHeader(PutIndexedScriptRequest.class); } - private List getRequests(Class clazz) { - List results = new ArrayList<>(); - for (ActionRequest request : requests) { - if (request.getClass().equals(clazz)) { - results.add((T) request); - } - } - - return results; - } - - private void assertRequestsContainHeader(Class clazz) { - List classRequests = getRequests(clazz); - for (ActionRequest request : classRequests) { + private void assertRequestsContainHeader(Class> clazz) { + List> classRequests = ActionRecordingPlugin.requestsOfType(clazz); + for (ActionRequest request : classRequests) { assertRequestContainsHeader(request); } } - private void assertGetRequestsContainHeaders() { - assertGetRequestsContainHeaders(this.lookupIndex); - } - private void assertGetRequestsContainHeaders(String index) { - List getRequests = getRequests(GetRequest.class); + List getRequests = ActionRecordingPlugin.requestsOfType(GetRequest.class); assertThat(getRequests, hasSize(greaterThan(0))); for (GetRequest request : getRequests) { @@ -306,7 +281,7 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { } } - private void assertRequestContainsHeader(ActionRequest request) { + private void assertRequestContainsHeader(ActionRequest request) { String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; @@ -324,7 +299,9 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { Client transportClient = internalCluster().transportClient(); FilterClient filterClient = new FilterClient(transportClient) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected , Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder> void doExecute( + Action action, Request request, + ActionListener listener) { 
request.putHeader(randomHeaderKey, randomHeaderValue); super.doExecute(action, request, listener); } @@ -332,58 +309,4 @@ public class ContextAndHeaderTransportTests extends ESIntegTestCase { return filterClient; } - - public static class ActionLoggingPlugin extends Plugin { - - @Override - public String name() { - return "test-action-logging"; - } - - @Override - public String description() { - return "Test action logging"; - } - - @Override - public Collection nodeModules() { - return Collections.singletonList(new ActionLoggingModule()); - } - - public void onModule(ActionModule module) { - module.registerFilter(LoggingFilter.class); - } - } - - public static class ActionLoggingModule extends AbstractModule { - @Override - protected void configure() { - bind(LoggingFilter.class).asEagerSingleton(); - } - - } - - public static class LoggingFilter extends ActionFilter.Simple { - - @Inject - public LoggingFilter(Settings settings) { - super(settings); - } - - @Override - public int order() { - return 999; - } - - @Override - protected boolean apply(String action, ActionRequest request, ActionListener listener) { - requests.add(request); - return true; - } - - @Override - protected boolean apply(String action, ActionResponse response, ActionListener listener) { - return true; - } - } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java index 4b3d3f3ff98..39b2f290cd3 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/RenderSearchTemplateTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; 
+import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.Template; @@ -68,7 +69,7 @@ public class RenderSearchTemplateTests extends ESIntegTestCase { throw new RuntimeException(e); } return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", configDir).build(); + .put(Environment.PATH_CONF_SETTING.getKey(), configDir).build(); } public void testInlineTemplate() { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 66c3376f04e..e005ca5b100 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -18,6 +18,10 @@ */ package org.elasticsearch.messy.tests; +import java.io.IOException; +import java.lang.reflect.Proxy; +import java.util.Collections; + import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.Accountable; @@ -31,6 +35,7 @@ import org.elasticsearch.common.inject.Injector; import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.inject.util.Providers; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; @@ -60,17 +65,15 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.mustache.MustacheScriptEngineService; +import 
org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPoolModule; import org.junit.After; import org.junit.Before; -import java.io.IOException; -import java.lang.reflect.Proxy; -import java.util.Collections; - import static org.hamcrest.Matchers.containsString; /** @@ -85,8 +88,8 @@ public class TemplateQueryParserTests extends ESTestCase { @Before public void setup() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir().toString()) - .put("path.conf", this.getDataPath("config")) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(Environment.PATH_CONF_SETTING.getKey(), this.getDataPath("config")) .put("name", getClass().getName()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); @@ -100,15 +103,20 @@ public class TemplateQueryParserTests extends ESTestCase { ScriptModule scriptModule = new ScriptModule(settings); // TODO: make this use a mock engine instead of mustache and it will no longer be messy! 
scriptModule.addScriptEngine(MustacheScriptEngineService.class); + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); injector = new ModulesBuilder().add( new EnvironmentModule(new Environment(settings)), - new SettingsModule(settings, new SettingsFilter(settings)), + settingsModule, new ThreadPoolModule(new ThreadPool(settings)), - new IndicesModule() { + new SearchModule(settings, new NamedWriteableRegistry()) { @Override - public void configure() { - // skip services - bindQueryParsersExtension(); + protected void configureSearch() { + // skip so we don't need transport + } + @Override + protected void configureSuggesters() { + // skip so we don't need IndicesService } }, scriptModule, diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java index 70298266df9..0914fd6cd7c 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TemplateQueryBuilder; import org.elasticsearch.index.query.TemplateQueryParser; @@ -85,7 +86,7 @@ public class TemplateQueryTests extends ESIntegTestCase { @Override public Settings nodeSettings(int nodeOrdinal) { return settingsBuilder().put(super.nodeSettings(nodeOrdinal)) - .put("path.conf", this.getDataPath("config")).build(); + .put(Environment.PATH_CONF_SETTING.getKey(), 
this.getDataPath("config")).build(); } public void testTemplateInBody() throws IOException { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java index 0c489b3afb1..727d0c4316d 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.script.mustache; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MustacheRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MustachePlugin.class); - } - public MustacheRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 8e8c8981493..b388f8a3ad5 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -48,12 +48,13 @@ public class MustacheScriptEngineTests extends ESTestCase { } public void testSimpleParameterReplace() { + Map compileParams = Collections.singletonMap("content_type", "application/json"); { String template = "GET _search {\"query\": " + "{\"boosting\": {" + "\"positive\": 
{\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + "}}, \"negative_boost\": {{boost_val}} } }}"; Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); @@ -64,7 +65,7 @@ public class MustacheScriptEngineTests extends ESTestCase { Map vars = new HashMap<>(); vars.put("boost_val", "0.3"); vars.put("body_val", "\"quick brown\""); - BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, Collections.emptyMap())), vars).run(); + BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", new String(o.toBytes(), Charset.forName("UTF-8"))); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index d8cf7732378..9c560210e2c 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ 
b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -18,36 +18,142 @@ */ package org.elasticsearch.script.mustache; -import com.github.mustachejava.DefaultMustacheFactory; import com.github.mustachejava.Mustache; -import com.github.mustachejava.MustacheFactory; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.script.CompiledScript; +import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptEngineService; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; -import java.io.StringReader; -import java.io.StringWriter; + +import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.HashSet; + +import static java.util.Collections.singleton; +import static java.util.Collections.singletonMap; +import static org.elasticsearch.script.mustache.MustacheScriptEngineService.CONTENT_TYPE_PARAM; +import static org.elasticsearch.script.mustache.MustacheScriptEngineService.JSON_CONTENT_TYPE; +import static org.elasticsearch.script.mustache.MustacheScriptEngineService.PLAIN_TEXT_CONTENT_TYPE; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; -/** - * Figure out how Mustache works for the simplest use case. Leaving in here for now for reference. 
- * */ public class MustacheTests extends ESTestCase { - public void test() { - HashMap scopes = new HashMap<>(); - scopes.put("boost_val", "0.2"); + private ScriptEngineService engine = new MustacheScriptEngineService(Settings.EMPTY); + + public void testBasics() { String template = "GET _search {\"query\": " + "{\"boosting\": {" - + "\"positive\": {\"match\": {\"body\": \"gift\"}}," - + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" - + "}}, \"negative_boost\": {{boost_val}} } }}"; - MustacheFactory f = new DefaultMustacheFactory(); - Mustache mustache = f.compile(new StringReader(template), "example"); - StringWriter writer = new StringWriter(); - mustache.execute(writer, scopes); - writer.flush(); + + "\"positive\": {\"match\": {\"body\": \"gift\"}}," + + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}" + + "}}, \"negative_boost\": {{boost_val}} } }}"; + Map params = Collections.singletonMap("boost_val", "0.2"); + + Mustache mustache = (Mustache) engine.compile(template, Collections.emptyMap()); + CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "my-name", "mustache", mustache); + ExecutableScript result = engine.executable(compiledScript, params); assertEquals( "Mustache templating broken", "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.2 } }}", - writer.toString()); + ((BytesReference) result.run()).toUtf8() + ); } + + public void testArrayAccess() throws Exception { + String template = "{{data.0}} {{data.1}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map vars = new HashMap<>(); + Object data = randomFrom( + new String[] { "foo", "bar" }, + Arrays.asList("foo", "bar")); + vars.put("data", data); + Object output = engine.executable(mustache, vars).run(); + 
assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + BytesReference bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), equalTo("foo bar")); + + // Sets can come out in any order + Set setData = new HashSet<>(); + setData.add("foo"); + setData.add("bar"); + vars.put("data", setData); + output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + } + + public void testArrayInArrayAccess() throws Exception { + String template = "{{data.0.0}} {{data.0.1}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map vars = new HashMap<>(); + Object data = randomFrom( + new String[][] { new String[] { "foo", "bar" }}, + Collections.singletonList(new String[] { "foo", "bar" }), + singleton(new String[] { "foo", "bar" }) + ); + vars.put("data", data); + Object output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + BytesReference bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), equalTo("foo bar")); + } + + public void testMapInArrayAccess() throws Exception { + String template = "{{data.0.key}} {{data.1.key}}"; + CompiledScript mustache = new CompiledScript(ScriptService.ScriptType.INLINE, "inline", "mustache", engine.compile(template, Collections.emptyMap())); + Map vars = new HashMap<>(); + Object data = randomFrom( + new Object[] { singletonMap("key", "foo"), singletonMap("key", "bar") }, + Arrays.asList(singletonMap("key", "foo"), singletonMap("key", "bar"))); + vars.put("data", data); + Object output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, 
instanceOf(BytesReference.class)); + BytesReference bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), equalTo("foo bar")); + + // HashSet iteration order isn't fixed + Set setData = new HashSet<>(); + setData.add(singletonMap("key", "foo")); + setData.add(singletonMap("key", "bar")); + vars.put("data", setData); + output = engine.executable(mustache, vars).run(); + assertThat(output, notNullValue()); + assertThat(output, instanceOf(BytesReference.class)); + bytes = (BytesReference) output; + assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + } + + public void testEscaping() { + // json string escaping enabled: + Map params = randomBoolean() ? Collections.emptyMap() : Collections.singletonMap(CONTENT_TYPE_PARAM, JSON_CONTENT_TYPE); + Mustache mustache = (Mustache) engine.compile("{ \"field1\": \"{{value}}\"}", Collections.emptyMap()); + CompiledScript compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "name", "mustache", mustache); + ExecutableScript executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); + BytesReference rawResult = (BytesReference) executableScript.run(); + String result = rawResult.toUtf8(); + assertThat(result, equalTo("{ \"field1\": \"a \\\"value\\\"\"}")); + + // json string escaping disabled: + mustache = (Mustache) engine.compile("{ \"field1\": \"{{value}}\"}", Collections.singletonMap(CONTENT_TYPE_PARAM, PLAIN_TEXT_CONTENT_TYPE)); + compiledScript = new CompiledScript(ScriptService.ScriptType.INLINE, "name", "mustache", mustache); + executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); + rawResult = (BytesReference) executableScript.run(); + result = rawResult.toUtf8(); + assertThat(result, equalTo("{ \"field1\": \"a \"value\"\"}")); + } + } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml 
b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml index 9bfea28abfa..195eea7c4b8 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yaml @@ -11,7 +11,6 @@ nodes.info: {} - match: { nodes.$master.modules.0.name: lang-mustache } - - match: { nodes.$master.modules.0.jvm: true } --- "Indexed template": diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 84b4b753063..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1fce4e9b5c4482bb95e8b275c825d112640d6f1e \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1725675.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..f40f87d5286 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +4504d3d993f094ed70585124df56c2be86c2615a \ No newline at end of file diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java index bfb2b96a5ba..8da56d5a72b 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/AnalysisICURestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import 
org.elasticsearch.plugin.analysis.icu.AnalysisICUPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisICURestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisICUPlugin.class); - } - public AnalysisICURestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java index d4b2530dbb6..efd60427e23 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuAnalysisTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.analysis; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -32,7 +33,7 @@ import static org.hamcrest.Matchers.instanceOf; public class SimpleIcuAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { Settings settings = settingsBuilder() - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); AnalysisService analysisService = createAnalysisService(settings); TokenizerFactory tokenizerFactory = analysisService.tokenizer("icu_tokenizer"); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java index 
33c1f337dbd..632f3f539d6 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuCollationTokenFilterTests.java @@ -27,6 +27,7 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -45,7 +46,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testBasicUsage() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") @@ -61,7 +62,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testNormalization() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "tr") .put("index.analysis.filter.myCollator.strength", "primary") @@ -78,7 +79,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testSecondaryStrength() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") 
.put("index.analysis.filter.myCollator.strength", "secondary") @@ -96,7 +97,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnorePunctuation() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") @@ -114,7 +115,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnoreWhitespace() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") @@ -135,7 +136,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testNumerics() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.numeric", "true") @@ -152,7 +153,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testIgnoreAccentsButNotCase() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "primary") @@ -173,7 +174,7 @@ public class 
SimpleIcuCollationTokenFilterTests extends ESTestCase { */ public void testUpperCaseFirst() throws IOException { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.language", "en") .put("index.analysis.filter.myCollator.strength", "tertiary") @@ -203,7 +204,7 @@ public class SimpleIcuCollationTokenFilterTests extends ESTestCase { String tailoredRules = tailoredCollator.getRules(); Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myCollator.type", "icu_collation") .put("index.analysis.filter.myCollator.rules", tailoredRules) .put("index.analysis.filter.myCollator.strength", "primary") diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java index acdbd9d4dfc..7ebb783d1db 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/SimpleIcuNormalizerCharFilterTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.analysis; import com.ibm.icu.text.Normalizer2; import org.apache.lucene.analysis.CharFilter; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import java.io.StringReader; @@ -34,7 +35,7 @@ import static org.elasticsearch.index.analysis.AnalysisTestUtils.createAnalysisS public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { public void testDefaultSetting() throws Exception { Settings settings = Settings.settingsBuilder() - 
.put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .build(); AnalysisService analysisService = createAnalysisService(settings); @@ -57,7 +58,7 @@ public class SimpleIcuNormalizerCharFilterTests extends ESTestCase { public void testNameAndModeSetting() throws Exception { Settings settings = Settings.settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.char_filter.myNormalizerChar.type", "icu_normalizer") .put("index.analysis.char_filter.myNormalizerChar.name", "nfkc") .put("index.analysis.char_filter.myNormalizerChar.mode", "decompose") diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 429f8b59b3e..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f104f306fef9d3033db026705043e9cbd145aba5 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1725675.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..c54e995c417 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +15555d41d27bb398b6736be85a5eca4ca224b85d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java index cb9435e430e..ae51e491d6b 100644 --- 
a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/AnalysisKuromojiRestIT.java @@ -21,23 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.kuromoji.AnalysisKuromojiPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisKuromojiRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisKuromojiPlugin.class); - } - - public AnalysisKuromojiRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java index 3adb8202833..016053810d2 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiAnalysisTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.kuromoji.AnalysisKuromojiPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.io.InputStream; @@ -198,16 +199,17 @@ public class KuromojiAnalysisTests extends ESTestCase { String json = "/org/elasticsearch/index/analysis/kuromoji_analysis.json"; 
Settings settings = Settings.settingsBuilder() - .put("path.home", home) + .put(Environment.PATH_HOME_SETTING.getKey(), home) .loadFromStream(json, getClass().getResourceAsStream(json)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); - + final SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); Index index = new Index("test"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisKuromojiPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index a814cf5cb03..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40b2034a6aed4c3fe0509016fab4f7bbb37a5fc8 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1725675.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..e14685f7ace --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +9d43a338338a6c88e8071a0e3eeb51f4d9d0364a \ No newline at end of file diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java index 
98380a07176..9d66bf24357 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/AnalysisPhoneticRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.AnalysisPhoneticPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisPhoneticRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisPhoneticPlugin.class); - } - public AnalysisPhoneticRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java index 0b6a4027685..6dd341346e5 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.AnalysisPhoneticPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -47,7 +48,7 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { String yaml = 
"/org/elasticsearch/index/analysis/phonetic-1.yml"; Settings settings = settingsBuilder().loadFromStream(yaml, getClass().getResourceAsStream(yaml)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); AnalysisService analysisService = testSimpleConfiguration(settings); TokenFilterFactory filterFactory = analysisService.tokenFilter("phonetic"); @@ -58,7 +59,9 @@ public class SimplePhoneticAnalysisTests extends ESTestCase { Index index = new Index("test"); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisPhoneticPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); return parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index af3c4a277ea..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e117a87f4338be80b0a052d2ce454d5086aa57f1 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1725675.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..505e90d11b1 --- /dev/null +++ 
b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +b66c95032c5ca41ce7b85519c64aab4e9a233f78 \ No newline at end of file diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java index 2a76c21d353..16113b2b7ac 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/AnalysisSmartChineseRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisSmartChineseRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisSmartChinesePlugin.class); - } - public AnalysisSmartChineseRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java index cfc2b28ec6a..d33d36d4c60 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SimpleSmartChineseAnalysisTests.java @@ -33,6 
+33,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.smartcn.AnalysisSmartChinesePlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -46,12 +47,14 @@ public class SimpleSmartChineseAnalysisTests extends ESTestCase { public void testDefaultsIcuAnalysis() throws IOException { Index index = new Index("test"); Settings settings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisSmartChinesePlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); final AnalysisService analysisService = parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 deleted file mode 100644 index 899769b0e29..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1721183.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -703dd91fccdc1c4662c80e412a449097c0578d83 \ No newline at end of file diff --git 
a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1725675.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1725675.jar.sha1 new file mode 100644 index 00000000000..c9102990806 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.5.0-snapshot-1725675.jar.sha1 @@ -0,0 +1 @@ +4f41bacd77ce372f10f2c57ab516b2ce9aa71173 \ No newline at end of file diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java index c99ff75aebf..330ad87af74 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/AnalysisPolishRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.analysis; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AnalysisPolishRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AnalysisStempelPlugin.class); - } - public AnalysisPolishRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java index f3ce4326afb..05c7252bdf7 100644 --- 
a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/PolishAnalysisTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -49,14 +50,16 @@ public class PolishAnalysisTests extends ESTestCase { public void testDefaultsPolishAnalysis() throws IOException { Index index = new Index("test"); Settings settings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisStempelPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java index a68f958580e..306a835c36e 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/SimplePolishTokenFilterTests.java @@ 
-38,6 +38,7 @@ import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugin.analysis.stempel.AnalysisStempelPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; import java.io.StringReader; @@ -58,7 +59,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { Index index = new Index("test"); Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("index.analysis.filter.myStemmer.type", "polish_stem") .build(); AnalysisService analysisService = createAnalysisService(index, settings); @@ -80,7 +81,7 @@ public class SimplePolishTokenFilterTests extends ESTestCase { Index index = new Index("test"); Settings settings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); AnalysisService analysisService = createAnalysisService(index, settings); @@ -100,7 +101,9 @@ public class SimplePolishTokenFilterTests extends ESTestCase { private AnalysisService createAnalysisService(Index index, Settings settings) throws IOException { AnalysisModule analysisModule = new AnalysisModule(new Environment(settings)); new AnalysisStempelPlugin().onModule(analysisModule); - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(settings, new SettingsFilter(settings)), + SettingsModule settingsModule = new SettingsModule(settings, new SettingsFilter(settings)); + settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED); + Injector parentInjector = new ModulesBuilder().add(settingsModule, new EnvironmentModule(new Environment(settings)), analysisModule) .createInjector(); return 
parentInjector.getInstance(AnalysisRegistry.class).build(IndexSettingsModule.newIndexSettings(index, settings)); diff --git a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java index 038f117b83f..9674d541354 100644 --- a/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java +++ b/plugins/delete-by-query/src/test/java/org/elasticsearch/plugin/deletebyquery/test/rest/DeleteByQueryRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.plugin.deletebyquery.test.rest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.deletebyquery.DeleteByQueryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class DeleteByQueryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(DeleteByQueryPlugin.class); - } - public DeleteByQueryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-azure/build.gradle b/plugins/discovery-azure/build.gradle index 2451672f1d1..ec4ef7cb625 100644 --- a/plugins/discovery-azure/build.gradle +++ b/plugins/discovery-azure/build.gradle @@ -56,8 +56,6 @@ dependencyLicenses { mapping from: /jaxb-.*/, to: 'jaxb' } -compileJava.options.compilerArgs << '-Xlint:-path,-unchecked' - thirdPartyAudit.excludes = [ // classes are missing 'javax.servlet.ServletContextEvent', diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java 
b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java index 5215b90e7e1..d48eed9e507 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/AzureDiscoveryModule.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.azure.AzureDiscovery; /** @@ -73,7 +74,7 @@ public class AzureDiscoveryModule extends AbstractModule { */ public static boolean isDiscoveryReady(Settings settings, ESLogger logger) { // User set discovery.type: azure - if (!AzureDiscovery.AZURE.equalsIgnoreCase(settings.get("discovery.type"))) { + if (!AzureDiscovery.AZURE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) { logger.trace("discovery.type not set to {}", AzureDiscovery.AZURE); return false; } diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java index c79a7450929..de2343d9d87 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/cloud/azure/management/AzureComputeService.java @@ -20,6 +20,11 @@ package org.elasticsearch.cloud.azure.management; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; + +import java.util.Locale; /** * @@ -39,9 +44,11 @@ public 
interface AzureComputeService { } static public final class Discovery { - public static final String REFRESH = "discovery.azure.refresh_interval"; + public static final Setting REFRESH_SETTING = Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), false, Setting.Scope.CLUSTER); + + public static final Setting HOST_TYPE_SETTING = new Setting<>("discovery.azure.host.type", + AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(), AzureUnicastHostsProvider.HostType::fromString, false, Setting.Scope.CLUSTER); - public static final String HOST_TYPE = "discovery.azure.host.type"; public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name"; public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name"; public static final String DEPLOYMENT_SLOT = "discovery.azure.deployment.slot"; diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java index 690ab623bd9..aac167fc673 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/discovery/azure/AzureUnicastHostsProvider.java @@ -45,7 +45,6 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.List; -import java.util.Locale; /** * @@ -68,7 +67,7 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic return hostType; } } - return null; + throw new IllegalArgumentException("invalid value for host type [" + type + "]"); } } @@ -118,16 +117,9 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic this.networkService = networkService; this.version = version; - this.refreshInterval = settings.getAsTime(Discovery.REFRESH, TimeValue.timeValueSeconds(0)); + 
this.refreshInterval = Discovery.REFRESH_SETTING.get(settings); - String strHostType = settings.get(Discovery.HOST_TYPE, HostType.PRIVATE_IP.name()).toUpperCase(Locale.ROOT); - HostType tmpHostType = HostType.fromString(strHostType); - if (tmpHostType == null) { - logger.warn("wrong value for [{}]: [{}]. falling back to [{}]...", Discovery.HOST_TYPE, - strHostType, HostType.PRIVATE_IP.name().toLowerCase(Locale.ROOT)); - tmpHostType = HostType.PRIVATE_IP; - } - this.hostType = tmpHostType; + this.hostType = Discovery.HOST_TYPE_SETTING.get(settings); this.publicEndpointName = settings.get(Discovery.ENDPOINT_NAME, "elasticsearch"); // Deployment name could be set with discovery.azure.deployment.name diff --git a/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java b/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java index e61a82a6ceb..418bd1291ed 100644 --- a/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java +++ b/plugins/discovery-azure/src/main/java/org/elasticsearch/plugin/discovery/azure/AzureDiscoveryPlugin.java @@ -20,10 +20,12 @@ package org.elasticsearch.plugin.discovery.azure; import org.elasticsearch.cloud.azure.AzureDiscoveryModule; +import org.elasticsearch.cloud.azure.management.AzureComputeService; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.azure.AzureDiscovery; import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider; @@ -66,4 +68,9 @@ public class AzureDiscoveryPlugin extends Plugin { discoveryModule.addUnicastHostProvider(AzureUnicastHostsProvider.class); } } + + public void 
onModule(SettingsModule settingsModule) { + settingsModule.registerSetting(AzureComputeService.Discovery.REFRESH_SETTING); + settingsModule.registerSetting(AzureComputeService.Discovery.HOST_TYPE_SETTING); + } } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java index bf1ba94d309..9babfb4c2ad 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java @@ -48,7 +48,7 @@ public abstract class AbstractAzureComputeServiceTestCase extends ESIntegTestCas // We add a fake subscription_id to start mock compute service builder.put(Management.SUBSCRIPTION_ID, "fake") - .put(Discovery.REFRESH, "5s") + .put(Discovery.REFRESH_SETTING.getKey(), "5s") .put(Management.KEYSTORE_PATH, "dummy") .put(Management.KEYSTORE_PASSWORD, "dummy") .put(Management.SERVICE_NAME, "dummy"); diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java index 9747543a77b..ad7140f5020 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -54,7 
+55,7 @@ public abstract class AbstractAzureTestCase extends ESIntegTestCase { protected Settings readSettingsFromFile() { Settings.Builder settings = Settings.builder(); - settings.put("path.home", createTempDir()); + settings.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); // if explicit, just load it and don't load from env try { diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java index 63888837cdd..131f73d1ca9 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureDiscoveryRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.discovery.azure; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AzureDiscoveryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AzureDiscoveryPlugin.class); - } - public AzureDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java index 19d6d038e8d..0f2d96573ef 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java +++ 
b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureMinimumMasterNodesTests.java @@ -53,8 +53,8 @@ public class AzureMinimumMasterNodesTests extends AbstractAzureComputeServiceTes .put(super.nodeSettings(nodeOrdinal)) .put("discovery.zen.minimum_master_nodes", 2) // Make the test run faster - .put(ZenDiscovery.SETTING_JOIN_TIMEOUT, "50ms") - .put(ZenDiscovery.SETTING_PING_TIMEOUT, "10ms") + .put(ZenDiscovery.JOIN_TIMEOUT_SETTING.getKey(), "50ms") + .put(ZenDiscovery.PING_TIMEOUT_SETTING.getKey(), "10ms") .put("discovery.initial_state_timeout", "100ms"); return builder.build(); } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java index cc4021fb78c..6a6ef0944ce 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureSimpleTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cloud.azure.management.AzureComputeService.Management; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, @@ -40,7 +41,7 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { public void testOneNodeDhouldRunUsingPrivateIp() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "private_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "private_ip"); logger.info("--> start one node"); internalCluster().startNode(settings); @@ -53,7 +54,7 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { public void testOneNodeShouldRunUsingPublicIp() { Settings.Builder 
settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "public_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "public_ip"); logger.info("--> start one node"); internalCluster().startNode(settings); @@ -66,13 +67,14 @@ public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { public void testOneNodeShouldRunUsingWrongSettings() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "do_not_exist"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "do_not_exist"); logger.info("--> start one node"); - internalCluster().startNode(settings); - assertThat(client().admin().cluster().prepareState().setMasterNodeTimeout("1s").execute().actionGet().getState().nodes().masterNodeId(), notNullValue()); - - // We expect having 1 node as part of the cluster, let's test that - checkNumberOfNodes(1); + try { + internalCluster().startNode(settings); + fail("Expected IllegalArgumentException on startup"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("invalid value for host type [do_not_exist]")); + } } } diff --git a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java index 2d134d0cc83..880c05ed121 100644 --- a/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java +++ b/plugins/discovery-azure/src/test/java/org/elasticsearch/discovery/azure/AzureTwoStartedNodesTests.java @@ -42,7 +42,7 @@ public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCa public void testTwoNodesShouldRunUsingPrivateIp() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "private_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), 
"private_ip"); logger.info("--> start first node"); internalCluster().startNode(settings); @@ -60,7 +60,7 @@ public class AzureTwoStartedNodesTests extends AbstractAzureComputeServiceTestCa public void testTwoNodesShouldRunUsingPublicIp() { Settings.Builder settings = Settings.settingsBuilder() .put(Management.SERVICE_NAME, "dummy") - .put(Discovery.HOST_TYPE, "public_ip"); + .put(Discovery.HOST_TYPE_SETTING.getKey(), "public_ip"); logger.info("--> start first node"); internalCluster().startNode(settings); diff --git a/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml b/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml index 51ba41e7e82..7a5acd1f001 100644 --- a/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml +++ b/plugins/discovery-azure/src/test/resources/rest-api-spec/test/discovery_azure/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-azure } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java index 09a0116fc6d..4aac319cc8e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/Ec2Module.java @@ -22,6 +22,7 @@ package org.elasticsearch.cloud.aws; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.ec2.Ec2Discovery; public class Ec2Module extends AbstractModule { @@ -37,7 +38,7 @@ public class Ec2Module extends AbstractModule { */ public static boolean isEc2DiscoveryActive(Settings settings, ESLogger logger) { // User set 
discovery.type: ec2 - if (!Ec2Discovery.EC2.equalsIgnoreCase(settings.get("discovery.type"))) { + if (!Ec2Discovery.EC2.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings))) { logger.trace("discovery.type not set to {}", Ec2Discovery.EC2); return false; } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index ec9155c51b3..e5931dc8b8e 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; @@ -40,7 +41,7 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .extendArray("plugin.types", Ec2DiscoveryPlugin.class.getName()) .put("cloud.aws.test.random", randomInt()) .put("cloud.aws.test.write_failures", 0.1) diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java index 57021a085e6..24ccf82a3d8 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java +++ 
b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/DiscoveryEc2RestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.cloud.aws; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class DiscoveryEc2RestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(Ec2DiscoveryPlugin.class); - } - public DiscoveryEc2RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml index e3b7844fd54..d612c75db97 100644 --- a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml +++ b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-ec2 } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 6f4459ef753..b888b817679 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -25,8 +25,6 @@ dependencyLicenses { mapping from: /google-.*/, to: 'google' } -compileJava.options.compilerArgs << '-Xlint:-rawtypes,-unchecked' - test { // this is needed for insecure plugins, remove if possible! 
systemProperty 'tests.artifact', project.name diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 5bb5e27ce64..97e637abcc2 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -21,6 +21,7 @@ package org.elasticsearch.plugin.discovery.gce; import com.google.api.client.http.HttpHeaders; import com.google.api.client.util.ClassInfo; + import org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.gce.GceComputeService; import org.elasticsearch.cloud.gce.GceModule; @@ -91,6 +92,7 @@ public class GceDiscoveryPlugin extends Plugin { } @Override + @SuppressWarnings("rawtypes") // Supertype uses raw type public Collection> nodeServices() { Collection> services = new ArrayList<>(); if (isDiscoveryAlive(settings, logger)) { @@ -113,7 +115,7 @@ public class GceDiscoveryPlugin extends Plugin { */ public static boolean isDiscoveryAlive(Settings settings, ESLogger logger) { // User set discovery.type: gce - if (GceDiscovery.GCE.equalsIgnoreCase(settings.get("discovery.type")) == false) { + if (GceDiscovery.GCE.equalsIgnoreCase(DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings)) == false) { logger.debug("discovery.type not set to {}", GceDiscovery.GCE); return false; } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java index 1a218394b7d..891dd156aac 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGCERestIT.java @@ -21,22 +21,14 @@ package 
org.elasticsearch.discovery.gce; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.discovery.gce.GceDiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class DiscoveryGCERestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(GceDiscoveryPlugin.class); - } - public DiscoveryGCERestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml index 8f5fbdc4ab4..6f48aa6c29e 100644 --- a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml +++ b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-gce } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java index d75c038a500..c6af20c011e 100644 --- a/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java +++ b/plugins/discovery-multicast/src/test/java/org/elasticsearch/plugin/discovery/multicast/MulticastDiscoveryRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.plugin.discovery.multicast; import com.carrotsearch.randomizedtesting.annotations.Name; import 
com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MulticastDiscoveryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MulticastDiscoveryPlugin.class); - } - public MulticastDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml b/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml index 4c110238aea..36172fa2c33 100644 --- a/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml +++ b/plugins/discovery-multicast/src/test/resources/rest-api-spec/test/discovery_multicast/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: discovery-multicast } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java index 2e9039ed67f..74573a79289 100644 --- a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleRestIT.java @@ -21,21 +21,14 @@ package org.elasticsearch.plugin.example; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import 
org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class JvmExampleRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(JvmExamplePlugin.class); - } - public JvmExampleRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml b/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml index 169b924f83d..c671fe2e8ba 100644 --- a/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml +++ b/plugins/jvm-example/src/test/resources/rest-api-spec/test/jvm_example/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: jvm-example } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java index 18f18372b90..8039715c3d1 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/LangJavaScriptRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.script.javascript; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.javascript.JavaScriptPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class LangJavaScriptRestIT extends ESRestTestCase { - @Override - protected 
Collection> nodePlugins() { - return pluginList(JavaScriptPlugin.class); - } - public LangJavaScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/lang-plan-a/build.gradle b/plugins/lang-plan-a/build.gradle index 810f0df4e16..f40bf4fd8e7 100644 --- a/plugins/lang-plan-a/build.gradle +++ b/plugins/lang-plan-a/build.gradle @@ -35,9 +35,6 @@ dependencyLicenses { mapping from: /asm-.*/, to: 'asm' } -compileJava.options.compilerArgs << '-Xlint:-cast,-rawtypes' -compileTestJava.options.compilerArgs << '-Xlint:-unchecked' - // regeneration logic, comes in via ant right now // don't port it to gradle, it works fine. diff --git a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 b/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 index 1b177a43381..851d924b33f 100644 --- a/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 +++ b/plugins/lang-plan-a/src/main/antlr/PlanAParser.g4 @@ -34,18 +34,23 @@ statement | CONTINUE SEMICOLON? # continue | BREAK SEMICOLON? # break | RETURN expression SEMICOLON? # return - | TRY block ( CATCH LP ( TYPE ID ) RP block )+ # try + | TRY block trap+ # try | THROW expression SEMICOLON? # throw | expression SEMICOLON? # expr ; block - : LBRACK statement* RBRACK # multiple + : LBRACK statement+ RBRACK # multiple | statement # single ; empty - : SEMICOLON + : emptyscope + | SEMICOLON + ; + +emptyscope + : LBRACK RBRACK ; initializer @@ -69,6 +74,10 @@ declvar : ID ( ASSIGN expression )? 
; +trap + : CATCH LP ( TYPE ID ) RP ( block | emptyscope ) + ; + expression : LP expression RP # precedence | ( OCTAL | HEX | INTEGER | DECIMAL ) # numeric diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java deleted file mode 100644 index 9788e63c3d7..00000000000 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Adapter.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.plan.a; - -import org.antlr.v4.runtime.ParserRuleContext; -import org.antlr.v4.runtime.tree.ParseTree; - -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.plan.a.Definition.Cast; -import static org.elasticsearch.plan.a.Definition.Type; -import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; - -class Adapter { - static class StatementMetadata { - final ParserRuleContext source; - - boolean last; - - boolean allExit; - boolean allReturn; - boolean anyReturn; - boolean allBreak; - boolean anyBreak; - boolean allContinue; - boolean anyContinue; - - private StatementMetadata(final ParserRuleContext source) { - this.source = source; - - last = false; - - allExit = false; - allReturn = false; - anyReturn = false; - allBreak = false; - anyBreak = false; - allContinue = false; - anyContinue = false; - } - } - - static class ExpressionMetadata { - final ParserRuleContext source; - - boolean read; - boolean statement; - - Object preConst; - Object postConst; - boolean isNull; - - Type to; - Type from; - boolean explicit; - boolean typesafe; - - Cast cast; - - private ExpressionMetadata(final ParserRuleContext source) { - this.source = source; - - read = true; - statement = false; - - preConst = null; - postConst = null; - isNull = false; - - to = null; - from = null; - explicit = false; - typesafe = true; - - cast = null; - } - } - - static class ExternalMetadata { - final ParserRuleContext source; - - boolean read; - ParserRuleContext storeExpr; - int token; - boolean pre; - boolean post; - - int scope; - Type current; - boolean statik; - boolean statement; - Object constant; - - private ExternalMetadata(final ParserRuleContext source) { - this.source = source; - - read = false; - storeExpr = null; - token = 0; - pre = false; - post = false; - - scope = 0; - current = null; - statik = false; - statement = false; - constant 
= null; - } - } - - static class ExtNodeMetadata { - final ParserRuleContext parent; - final ParserRuleContext source; - - Object target; - boolean last; - - Type type; - Type promote; - - Cast castFrom; - Cast castTo; - - private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { - this.parent = parent; - this.source = source; - - target = null; - last = false; - - type = null; - promote = null; - - castFrom = null; - castTo = null; - } - } - - static String error(final ParserRuleContext ctx) { - return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; - } - - final Definition definition; - final String source; - final ParserRuleContext root; - final CompilerSettings settings; - - private final Map statementMetadata; - private final Map expressionMetadata; - private final Map externalMetadata; - private final Map extNodeMetadata; - - Adapter(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) { - this.definition = definition; - this.source = source; - this.root = root; - this.settings = settings; - - statementMetadata = new HashMap<>(); - expressionMetadata = new HashMap<>(); - externalMetadata = new HashMap<>(); - extNodeMetadata = new HashMap<>(); - } - - StatementMetadata createStatementMetadata(final ParserRuleContext source) { - final StatementMetadata sourcesmd = new StatementMetadata(source); - statementMetadata.put(source, sourcesmd); - - return sourcesmd; - } - - StatementMetadata getStatementMetadata(final ParserRuleContext source) { - final StatementMetadata sourcesmd = statementMetadata.get(source); - - if (sourcesmd == null) { - throw new IllegalStateException(error(source) + "Statement metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourcesmd; - } - - ExpressionContext updateExpressionTree(ExpressionContext source) { - if (source instanceof PrecedenceContext) { 
- final ParserRuleContext parent = source.getParent(); - int index = 0; - - for (final ParseTree child : parent.children) { - if (child == source) { - break; - } - - ++index; - } - - while (source instanceof PrecedenceContext) { - source = ((PrecedenceContext)source).expression(); - } - - parent.children.set(index, source); - } - - return source; - } - - ExpressionMetadata createExpressionMetadata(ParserRuleContext source) { - final ExpressionMetadata sourceemd = new ExpressionMetadata(source); - expressionMetadata.put(source, sourceemd); - - return sourceemd; - } - - ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { - final ExpressionMetadata sourceemd = expressionMetadata.get(source); - - if (sourceemd == null) { - throw new IllegalStateException(error(source) + "Expression metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourceemd; - } - - ExternalMetadata createExternalMetadata(final ParserRuleContext source) { - final ExternalMetadata sourceemd = new ExternalMetadata(source); - externalMetadata.put(source, sourceemd); - - return sourceemd; - } - - ExternalMetadata getExternalMetadata(final ParserRuleContext source) { - final ExternalMetadata sourceemd = externalMetadata.get(source); - - if (sourceemd == null) { - throw new IllegalStateException(error(source) + "External metadata does not exist at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourceemd; - } - - ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { - final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source); - extNodeMetadata.put(source, sourceemd); - - return sourceemd; - } - - ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) { - final ExtNodeMetadata sourceemd = extNodeMetadata.get(source); - - if (sourceemd == null) { - throw new IllegalStateException(error(source) + "External metadata does not exist 
at" + - " the parse node with text [" + source.getText() + "]."); - } - - return sourceemd; - } -} diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java index eb2681cfba8..77769736bf1 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Analyzer.java @@ -20,6 +20,72 @@ package org.elasticsearch.plan.a; import org.antlr.v4.runtime.ParserRuleContext; +import org.elasticsearch.plan.a.Definition.Cast; +import org.elasticsearch.plan.a.Definition.Constructor; +import org.elasticsearch.plan.a.Definition.Field; +import org.elasticsearch.plan.a.Definition.Method; +import org.elasticsearch.plan.a.Definition.Pair; +import org.elasticsearch.plan.a.Definition.Sort; +import org.elasticsearch.plan.a.Definition.Struct; +import org.elasticsearch.plan.a.Definition.Transform; +import org.elasticsearch.plan.a.Definition.Type; +import org.elasticsearch.plan.a.Metadata.ExpressionMetadata; +import org.elasticsearch.plan.a.Metadata.ExtNodeMetadata; +import org.elasticsearch.plan.a.Metadata.ExternalMetadata; +import org.elasticsearch.plan.a.Metadata.StatementMetadata; +import org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; +import org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; +import org.elasticsearch.plan.a.PlanAParser.AssignmentContext; +import org.elasticsearch.plan.a.PlanAParser.BinaryContext; +import org.elasticsearch.plan.a.PlanAParser.BlockContext; +import org.elasticsearch.plan.a.PlanAParser.BoolContext; +import org.elasticsearch.plan.a.PlanAParser.BreakContext; +import org.elasticsearch.plan.a.PlanAParser.CastContext; +import org.elasticsearch.plan.a.PlanAParser.CharContext; +import org.elasticsearch.plan.a.PlanAParser.CompContext; +import org.elasticsearch.plan.a.PlanAParser.ConditionalContext; +import org.elasticsearch.plan.a.PlanAParser.ContinueContext; 
+import org.elasticsearch.plan.a.PlanAParser.DeclContext; +import org.elasticsearch.plan.a.PlanAParser.DeclarationContext; +import org.elasticsearch.plan.a.PlanAParser.DecltypeContext; +import org.elasticsearch.plan.a.PlanAParser.DeclvarContext; +import org.elasticsearch.plan.a.PlanAParser.DoContext; +import org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import org.elasticsearch.plan.a.PlanAParser.ExprContext; +import org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; +import org.elasticsearch.plan.a.PlanAParser.ExtcallContext; +import org.elasticsearch.plan.a.PlanAParser.ExtcastContext; +import org.elasticsearch.plan.a.PlanAParser.ExtdotContext; +import org.elasticsearch.plan.a.PlanAParser.ExternalContext; +import org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; +import org.elasticsearch.plan.a.PlanAParser.ExtnewContext; +import org.elasticsearch.plan.a.PlanAParser.ExtprecContext; +import org.elasticsearch.plan.a.PlanAParser.ExtstartContext; +import org.elasticsearch.plan.a.PlanAParser.ExtstringContext; +import org.elasticsearch.plan.a.PlanAParser.ExttypeContext; +import org.elasticsearch.plan.a.PlanAParser.ExtvarContext; +import org.elasticsearch.plan.a.PlanAParser.FalseContext; +import org.elasticsearch.plan.a.PlanAParser.ForContext; +import org.elasticsearch.plan.a.PlanAParser.IfContext; +import org.elasticsearch.plan.a.PlanAParser.IncrementContext; +import org.elasticsearch.plan.a.PlanAParser.InitializerContext; +import org.elasticsearch.plan.a.PlanAParser.MultipleContext; +import org.elasticsearch.plan.a.PlanAParser.NullContext; +import org.elasticsearch.plan.a.PlanAParser.NumericContext; +import org.elasticsearch.plan.a.PlanAParser.PostincContext; +import org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; +import org.elasticsearch.plan.a.PlanAParser.PreincContext; +import org.elasticsearch.plan.a.PlanAParser.ReturnContext; +import 
org.elasticsearch.plan.a.PlanAParser.SingleContext; +import org.elasticsearch.plan.a.PlanAParser.SourceContext; +import org.elasticsearch.plan.a.PlanAParser.StatementContext; +import org.elasticsearch.plan.a.PlanAParser.ThrowContext; +import org.elasticsearch.plan.a.PlanAParser.TrapContext; +import org.elasticsearch.plan.a.PlanAParser.TrueContext; +import org.elasticsearch.plan.a.PlanAParser.TryContext; +import org.elasticsearch.plan.a.PlanAParser.UnaryContext; +import org.elasticsearch.plan.a.PlanAParser.WhileContext; import java.util.ArrayDeque; import java.util.Arrays; @@ -28,81 +94,18 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; -import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; -import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; -import static org.elasticsearch.plan.a.Adapter.StatementMetadata; -import static org.elasticsearch.plan.a.Adapter.error; -import static org.elasticsearch.plan.a.Definition.Cast; -import static org.elasticsearch.plan.a.Definition.Constructor; -import static org.elasticsearch.plan.a.Definition.Field; -import static org.elasticsearch.plan.a.Definition.Method; -import static org.elasticsearch.plan.a.Definition.Pair; -import static org.elasticsearch.plan.a.Definition.Sort; -import static org.elasticsearch.plan.a.Definition.Struct; -import static org.elasticsearch.plan.a.Definition.Transform; -import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.Metadata.error; import static org.elasticsearch.plan.a.PlanAParser.ADD; -import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; -import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; -import static org.elasticsearch.plan.a.PlanAParser.AssignmentContext; import static org.elasticsearch.plan.a.PlanAParser.BWAND; import static org.elasticsearch.plan.a.PlanAParser.BWOR; import static 
org.elasticsearch.plan.a.PlanAParser.BWXOR; -import static org.elasticsearch.plan.a.PlanAParser.BinaryContext; -import static org.elasticsearch.plan.a.PlanAParser.BlockContext; -import static org.elasticsearch.plan.a.PlanAParser.BoolContext; -import static org.elasticsearch.plan.a.PlanAParser.BreakContext; -import static org.elasticsearch.plan.a.PlanAParser.CastContext; -import static org.elasticsearch.plan.a.PlanAParser.CharContext; -import static org.elasticsearch.plan.a.PlanAParser.CompContext; -import static org.elasticsearch.plan.a.PlanAParser.ConditionalContext; -import static org.elasticsearch.plan.a.PlanAParser.ContinueContext; import static org.elasticsearch.plan.a.PlanAParser.DIV; -import static org.elasticsearch.plan.a.PlanAParser.DeclContext; -import static org.elasticsearch.plan.a.PlanAParser.DeclarationContext; -import static org.elasticsearch.plan.a.PlanAParser.DecltypeContext; -import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; -import static org.elasticsearch.plan.a.PlanAParser.DoContext; -import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; -import static org.elasticsearch.plan.a.PlanAParser.ExprContext; -import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtcallContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtcastContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtdotContext; -import static org.elasticsearch.plan.a.PlanAParser.ExternalContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtfieldContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtnewContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtprecContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtstartContext; -import static org.elasticsearch.plan.a.PlanAParser.ExtstringContext; -import static org.elasticsearch.plan.a.PlanAParser.ExttypeContext; -import 
static org.elasticsearch.plan.a.PlanAParser.ExtvarContext; -import static org.elasticsearch.plan.a.PlanAParser.FalseContext; -import static org.elasticsearch.plan.a.PlanAParser.ForContext; -import static org.elasticsearch.plan.a.PlanAParser.IfContext; -import static org.elasticsearch.plan.a.PlanAParser.IncrementContext; -import static org.elasticsearch.plan.a.PlanAParser.InitializerContext; import static org.elasticsearch.plan.a.PlanAParser.LSH; import static org.elasticsearch.plan.a.PlanAParser.MUL; -import static org.elasticsearch.plan.a.PlanAParser.MultipleContext; -import static org.elasticsearch.plan.a.PlanAParser.NullContext; -import static org.elasticsearch.plan.a.PlanAParser.NumericContext; -import static org.elasticsearch.plan.a.PlanAParser.PostincContext; -import static org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; -import static org.elasticsearch.plan.a.PlanAParser.PreincContext; import static org.elasticsearch.plan.a.PlanAParser.REM; import static org.elasticsearch.plan.a.PlanAParser.RSH; -import static org.elasticsearch.plan.a.PlanAParser.ReturnContext; import static org.elasticsearch.plan.a.PlanAParser.SUB; -import static org.elasticsearch.plan.a.PlanAParser.SingleContext; -import static org.elasticsearch.plan.a.PlanAParser.SourceContext; -import static org.elasticsearch.plan.a.PlanAParser.StatementContext; -import static org.elasticsearch.plan.a.PlanAParser.TrueContext; import static org.elasticsearch.plan.a.PlanAParser.USH; -import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; -import static org.elasticsearch.plan.a.PlanAParser.WhileContext; class Analyzer extends PlanAParserBaseVisitor { private static class Variable { @@ -117,31 +120,30 @@ class Analyzer extends PlanAParserBaseVisitor { } } - static void analyze(final Adapter adapter) { - new Analyzer(adapter); + static void analyze(final Metadata metadata) { + new Analyzer(metadata); } - private final Adapter adapter; + private final Metadata metadata; private final 
Definition definition; private final CompilerSettings settings; - private final Deque scopes; - private final Deque variables; + private final Deque scopes = new ArrayDeque<>(); + private final Deque variables = new ArrayDeque<>(); - private Analyzer(final Adapter adapter) { - this.adapter = adapter; - definition = adapter.definition; - settings = adapter.settings; - - scopes = new ArrayDeque<>(); - variables = new ArrayDeque<>(); + private Analyzer(final Metadata metadata) { + this.metadata = metadata; + definition = metadata.definition; + settings = metadata.settings; incrementScope(); - addVariable(null, "this", definition.execType); - addVariable(null, "input", definition.smapType); + addVariable(null, "#this", definition.execType); + metadata.inputValueSlot = addVariable(null, "input", definition.smapType).slot; + metadata.scoreValueSlot = addVariable(null, "_score", definition.floatType).slot; + metadata.loopCounterSlot = addVariable(null, "#loop", definition.intType).slot; - adapter.createStatementMetadata(adapter.root); - visit(adapter.root); + metadata.createStatementMetadata(metadata.root); + visit(metadata.root); decrementScope(); } @@ -179,7 +181,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException("Argument name [" + name + "] already defined within the scope."); } else { throw new IllegalArgumentException( - error(source) + "Variable name [" + name + "] already defined within the scope."); + error(source) + "Variable name [" + name + "] already defined within the scope."); } } @@ -201,34 +203,24 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitSource(final SourceContext ctx) { - final StatementMetadata sourcesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); final List statectxs = ctx.statement(); final StatementContext lastctx = statectxs.get(statectxs.size() - 1); incrementScope(); for (final StatementContext statectx : 
statectxs) { - if (sourcesmd.allExit) { + if (sourcesmd.allLast) { throw new IllegalArgumentException(error(statectx) + - "Statement will never be executed because all prior paths exit."); + "Statement will never be executed because all prior paths escape."); } - final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); - statesmd.last = statectx == lastctx; + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = statectx == lastctx; visit(statectx); - if (statesmd.anyContinue) { - throw new IllegalArgumentException(error(statectx) + - "Cannot have a continue statement outside of a loop."); - } - - if (statesmd.anyBreak) { - throw new IllegalArgumentException(error(statectx) + - "Cannot have a break statement outside of a loop."); - } - - sourcesmd.allExit = statesmd.allExit; - sourcesmd.allReturn = statesmd.allReturn; + sourcesmd.methodEscape = statesmd.methodEscape; + sourcesmd.allLast = statesmd.allLast; } decrementScope(); @@ -238,12 +230,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitIf(final IfContext ctx) { - final StatementMetadata ifsmd = adapter.getStatementMetadata(ctx); + final StatementMetadata ifsmd = metadata.getStatementMetadata(ctx); - incrementScope(); - - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); @@ -253,84 +243,85 @@ class Analyzer extends PlanAParserBaseVisitor { } final BlockContext blockctx0 = ctx.block(0); - final StatementMetadata blocksmd0 = adapter.createStatementMetadata(blockctx0); - blocksmd0.last = ifsmd.last; + final StatementMetadata blocksmd0 = 
metadata.createStatementMetadata(blockctx0); + blocksmd0.lastSource = ifsmd.lastSource; + blocksmd0.inLoop = ifsmd.inLoop; + blocksmd0.lastLoop = ifsmd.lastLoop; + incrementScope(); visit(blockctx0); + decrementScope(); - ifsmd.anyReturn = blocksmd0.anyReturn; - ifsmd.anyBreak = blocksmd0.anyBreak; ifsmd.anyContinue = blocksmd0.anyContinue; + ifsmd.anyBreak = blocksmd0.anyBreak; + + ifsmd.count = blocksmd0.count; if (ctx.ELSE() != null) { final BlockContext blockctx1 = ctx.block(1); - final StatementMetadata blocksmd1 = adapter.createStatementMetadata(blockctx1); - blocksmd1.last = ifsmd.last; + final StatementMetadata blocksmd1 = metadata.createStatementMetadata(blockctx1); + blocksmd1.lastSource = ifsmd.lastSource; + incrementScope(); visit(blockctx1); + decrementScope(); - ifsmd.allExit = blocksmd0.allExit && blocksmd1.allExit; - ifsmd.allReturn = blocksmd0.allReturn && blocksmd1.allReturn; - ifsmd.anyReturn |= blocksmd1.anyReturn; - ifsmd.allBreak = blocksmd0.allBreak && blocksmd1.allBreak; - ifsmd.anyBreak |= blocksmd1.anyBreak; - ifsmd.allContinue = blocksmd0.allContinue && blocksmd1.allContinue; + ifsmd.methodEscape = blocksmd0.methodEscape && blocksmd1.methodEscape; + ifsmd.loopEscape = blocksmd0.loopEscape && blocksmd1.loopEscape; + ifsmd.allLast = blocksmd0.allLast && blocksmd1.allLast; ifsmd.anyContinue |= blocksmd1.anyContinue; - } + ifsmd.anyBreak |= blocksmd1.anyBreak; - decrementScope(); + ifsmd.count = Math.max(ifsmd.count, blocksmd1.count); + } return null; } @Override public Void visitWhile(final WhileContext ctx) { - final StatementMetadata whilesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata whilesmd = metadata.getStatementMetadata(ctx); incrementScope(); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata 
expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); - boolean exitrequired = false; + boolean continuous = false; if (expremd.postConst != null) { - boolean constant = (boolean)expremd.postConst; + continuous = (boolean)expremd.postConst; - if (!constant) { + if (!continuous) { throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); } - exitrequired = true; + if (ctx.empty() != null) { + throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + } } final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; visit(blockctx); - if (blocksmd.allReturn) { - throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (blocksmd.allBreak) { - throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + if (continuous && !blocksmd.anyBreak) { + whilesmd.methodEscape = true; + whilesmd.allLast = true; } - - if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); - } - - if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { - whilesmd.allExit = true; - whilesmd.allReturn = true; - } - } else if (exitrequired) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); } + whilesmd.count = 1; + decrementScope(); return null; @@ -338,49 +329,41 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDo(final DoContext ctx) { - final StatementMetadata dosmd = 
adapter.getStatementMetadata(ctx); + final StatementMetadata dosmd = metadata.getStatementMetadata(ctx); incrementScope(); final BlockContext blockctx = ctx.block(); - final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + blocksmd.inLoop = true; visit(blockctx); - if (blocksmd.allReturn) { - throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (blocksmd.allBreak) { - throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); - } - - if (blocksmd.allContinue) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); - } - - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); if (expremd.postConst != null) { - final boolean exitrequired = (boolean)expremd.postConst; + final boolean continuous = (boolean)expremd.postConst; - if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); + if (!continuous) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { - dosmd.allExit = true; - dosmd.allReturn = true; - } - - if (!exitrequired && !blocksmd.anyContinue) { - throw new IllegalArgumentException(error(ctx) + "All paths exit so the 
loop is not necessary."); + if (!blocksmd.anyBreak) { + dosmd.methodEscape = true; + dosmd.allLast = true; } } + dosmd.count = 1; + decrementScope(); return null; @@ -388,72 +371,68 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitFor(final ForContext ctx) { - final StatementMetadata forsmd = adapter.getStatementMetadata(ctx); - boolean exitrequired = false; + final StatementMetadata forsmd = metadata.getStatementMetadata(ctx); + boolean continuous = false; incrementScope(); final InitializerContext initctx = ctx.initializer(); if (initctx != null) { - adapter.createStatementMetadata(initctx); + metadata.createStatementMetadata(initctx); visit(initctx); } - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.booleanType; visit(exprctx); markCast(expremd); if (expremd.postConst != null) { - boolean constant = (boolean)expremd.postConst; + continuous = (boolean)expremd.postConst; - if (!constant) { + if (!continuous) { throw new IllegalArgumentException(error(ctx) + "The loop will never be executed."); } - exitrequired = true; + if (ctx.empty() != null) { + throw new IllegalArgumentException(error(ctx) + "The loop is continuous."); + } } } else { - exitrequired = true; + continuous = true; } final AfterthoughtContext atctx = ctx.afterthought(); if (atctx != null) { - adapter.createStatementMetadata(atctx); + metadata.createStatementMetadata(atctx); visit(atctx); } final BlockContext blockctx = ctx.block(); if (blockctx != null) { - final StatementMetadata blocksmd = adapter.createStatementMetadata(blockctx); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.beginLoop = true; + 
blocksmd.inLoop = true; visit(blockctx); - if (blocksmd.allReturn) { - throw new IllegalArgumentException(error(ctx) + "All paths return so the loop is not necessary."); + if (blocksmd.loopEscape && !blocksmd.anyContinue) { + throw new IllegalArgumentException(error(ctx) + "All paths escape so the loop is not necessary."); } - if (blocksmd.allBreak) { - throw new IllegalArgumentException(error(ctx) + "All paths break so the loop is not necessary."); + if (continuous && !blocksmd.anyBreak) { + forsmd.methodEscape = true; + forsmd.allLast = true; } - - if (exitrequired && !blocksmd.anyReturn && !blocksmd.anyBreak) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); - } - - if (exitrequired && blocksmd.anyReturn && !blocksmd.anyBreak) { - forsmd.allExit = true; - forsmd.allReturn = true; - } - } else if (exitrequired) { - throw new IllegalArgumentException(error(ctx) + "The loop will never exit."); } + forsmd.count = 1; + decrementScope(); return null; @@ -461,97 +440,185 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDecl(final DeclContext ctx) { + final StatementMetadata declsmd = metadata.getStatementMetadata(ctx); + final DeclarationContext declctx = ctx.declaration(); - adapter.createStatementMetadata(declctx); + metadata.createStatementMetadata(declctx); visit(declctx); + declsmd.count = 1; + return null; } @Override public Void visitContinue(final ContinueContext ctx) { - final StatementMetadata continuesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata continuesmd = metadata.getStatementMetadata(ctx); - continuesmd.allExit = true; - continuesmd.allContinue = true; + if (!continuesmd.inLoop) { + throw new IllegalArgumentException(error(ctx) + "Cannot have a continue statement outside of a loop."); + } + + if (continuesmd.lastLoop) { + throw new IllegalArgumentException(error(ctx) + "Unnecessary continue statement at the end of a loop."); + } + + continuesmd.allLast = true; 
continuesmd.anyContinue = true; + continuesmd.count = 1; + return null; } @Override public Void visitBreak(final BreakContext ctx) { - final StatementMetadata breaksmd = adapter.getStatementMetadata(ctx); + final StatementMetadata breaksmd = metadata.getStatementMetadata(ctx); - breaksmd.allExit = true; - breaksmd.allBreak = true; + if (!breaksmd.inLoop) { + throw new IllegalArgumentException(error(ctx) + "Cannot have a break statement outside of a loop."); + } + + breaksmd.loopEscape = true; + breaksmd.allLast = true; breaksmd.anyBreak = true; + breaksmd.count = 1; + return null; } @Override public Void visitReturn(final ReturnContext ctx) { - final StatementMetadata returnsmd = adapter.getStatementMetadata(ctx); + final StatementMetadata returnsmd = metadata.getStatementMetadata(ctx); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = definition.objectType; visit(exprctx); markCast(expremd); - returnsmd.allExit = true; - returnsmd.allReturn = true; - returnsmd.anyReturn = true; + returnsmd.methodEscape = true; + returnsmd.loopEscape = true; + returnsmd.allLast = true; + + returnsmd.count = 1; + + return null; + } + + @Override + public Void visitTry(final TryContext ctx) { + final StatementMetadata trysmd = metadata.getStatementMetadata(ctx); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); + blocksmd.lastSource = trysmd.lastSource; + blocksmd.inLoop = trysmd.inLoop; + blocksmd.lastLoop = trysmd.lastLoop; + incrementScope(); + visit(blockctx); + decrementScope(); + + trysmd.methodEscape = blocksmd.methodEscape; + trysmd.loopEscape = blocksmd.loopEscape; + trysmd.allLast = blocksmd.allLast; + 
trysmd.anyContinue = blocksmd.anyContinue; + trysmd.anyBreak = blocksmd.anyBreak; + + int trapcount = 0; + + for (final TrapContext trapctx : ctx.trap()) { + final StatementMetadata trapsmd = metadata.createStatementMetadata(trapctx); + trapsmd.lastSource = trysmd.lastSource; + trapsmd.inLoop = trysmd.inLoop; + trapsmd.lastLoop = trysmd.lastLoop; + incrementScope(); + visit(trapctx); + decrementScope(); + + trysmd.methodEscape &= trapsmd.methodEscape; + trysmd.loopEscape &= trapsmd.loopEscape; + trysmd.allLast &= trapsmd.allLast; + trysmd.anyContinue |= trapsmd.anyContinue; + trysmd.anyBreak |= trapsmd.anyBreak; + + trapcount = Math.max(trapcount, trapsmd.count); + } + + trysmd.count = blocksmd.count + trapcount; + + return null; + } + + @Override + public Void visitThrow(final ThrowContext ctx) { + final StatementMetadata throwsmd = metadata.getStatementMetadata(ctx); + + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.to = definition.exceptionType; + visit(exprctx); + markCast(expremd); + + throwsmd.methodEscape = true; + throwsmd.loopEscape = true; + throwsmd.allLast = true; + + throwsmd.count = 1; return null; } @Override public Void visitExpr(final ExprContext ctx) { - final StatementMetadata exprsmd = adapter.getStatementMetadata(ctx); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); - expremd.read = exprsmd.last; + final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); + expremd.read = exprsmd.lastSource; visit(exprctx); - if (!expremd.statement && !exprsmd.last) { + if (!expremd.statement && !exprsmd.lastSource) { throw new 
IllegalArgumentException(error(ctx) + "Not a statement."); } - final boolean rtn = exprsmd.last && expremd.from.sort != Sort.VOID; - exprsmd.allExit = rtn; - exprsmd.allReturn = rtn; - exprsmd.anyReturn = rtn; + final boolean rtn = exprsmd.lastSource && expremd.from.sort != Sort.VOID; + exprsmd.methodEscape = rtn; + exprsmd.loopEscape = rtn; + exprsmd.allLast = rtn; expremd.to = rtn ? definition.objectType : expremd.from; markCast(expremd); + exprsmd.count = 1; + return null; } @Override public Void visitMultiple(final MultipleContext ctx) { - final StatementMetadata multiplesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata multiplesmd = metadata.getStatementMetadata(ctx); final List statectxs = ctx.statement(); final StatementContext lastctx = statectxs.get(statectxs.size() - 1); for (StatementContext statectx : statectxs) { - if (multiplesmd.allExit) { + if (multiplesmd.allLast) { throw new IllegalArgumentException(error(statectx) + - "Statement will never be executed because all prior paths exit."); + "Statement will never be executed because all prior paths escape."); } - final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); - statesmd.last = multiplesmd.last && statectx == lastctx; + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = multiplesmd.lastSource && statectx == lastctx; + statesmd.inLoop = multiplesmd.inLoop; + statesmd.lastLoop = (multiplesmd.beginLoop || multiplesmd.lastLoop) && statectx == lastctx; visit(statectx); - multiplesmd.allExit = statesmd.allExit; - multiplesmd.allReturn = statesmd.allReturn && !statesmd.anyBreak && !statesmd.anyContinue; - multiplesmd.anyReturn |= statesmd.anyReturn; - multiplesmd.allBreak = !statesmd.anyReturn && statesmd.allBreak && !statesmd.anyContinue; - multiplesmd.anyBreak |= statesmd.anyBreak; - multiplesmd.allContinue = !statesmd.anyReturn && !statesmd.anyBreak && statesmd.allContinue; + multiplesmd.methodEscape = 
statesmd.methodEscape; + multiplesmd.loopEscape = statesmd.loopEscape; + multiplesmd.allLast = statesmd.allLast; multiplesmd.anyContinue |= statesmd.anyContinue; + multiplesmd.anyBreak |= statesmd.anyBreak; + + multiplesmd.count += statesmd.count; } return null; @@ -559,20 +626,22 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitSingle(final SingleContext ctx) { - final StatementMetadata singlesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata singlesmd = metadata.getStatementMetadata(ctx); final StatementContext statectx = ctx.statement(); - final StatementMetadata statesmd = adapter.createStatementMetadata(statectx); - statesmd.last = singlesmd.last; + final StatementMetadata statesmd = metadata.createStatementMetadata(statectx); + statesmd.lastSource = singlesmd.lastSource; + statesmd.inLoop = singlesmd.inLoop; + statesmd.lastLoop = singlesmd.beginLoop || singlesmd.lastLoop; visit(statectx); - singlesmd.allExit = statesmd.allExit; - singlesmd.allReturn = statesmd.allReturn; - singlesmd.anyReturn = statesmd.anyReturn; - singlesmd.allBreak = statesmd.allBreak; - singlesmd.anyBreak = statesmd.anyBreak; - singlesmd.allContinue = statesmd.allContinue; + singlesmd.methodEscape = statesmd.methodEscape; + singlesmd.loopEscape = statesmd.loopEscape; + singlesmd.allLast = statesmd.allLast; singlesmd.anyContinue = statesmd.anyContinue; + singlesmd.anyBreak = statesmd.anyBreak; + + singlesmd.count = statesmd.count; return null; } @@ -585,13 +654,13 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitInitializer(InitializerContext ctx) { final DeclarationContext declctx = ctx.declaration(); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (declctx != null) { - adapter.createStatementMetadata(declctx); + metadata.createStatementMetadata(declctx); visit(declctx); } else if (exprctx != 
null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = false; visit(exprctx); @@ -600,7 +669,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (!expremd.statement) { throw new IllegalArgumentException(error(exprctx) + - "The intializer of a for loop must be a statement."); + "The intializer of a for loop must be a statement."); } } else { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -611,10 +680,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitAfterthought(AfterthoughtContext ctx) { - ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.read = false; visit(exprctx); @@ -623,7 +692,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (!expremd.statement) { throw new IllegalArgumentException(error(exprctx) + - "The afterthought of a for loop must be a statement."); + "The afterthought of a for loop must be a statement."); } } @@ -633,11 +702,11 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDeclaration(final DeclarationContext ctx) { final DecltypeContext decltypectx = ctx.decltype(); - final ExpressionMetadata decltypeemd = adapter.createExpressionMetadata(decltypectx); + final ExpressionMetadata decltypeemd = metadata.createExpressionMetadata(decltypectx); visit(decltypectx); for (final DeclvarContext declvarctx : ctx.declvar()) { - final ExpressionMetadata declvaremd = adapter.createExpressionMetadata(declvarctx); + final ExpressionMetadata declvaremd = metadata.createExpressionMetadata(declvarctx); declvaremd.to = decltypeemd.from; 
visit(declvarctx); } @@ -647,7 +716,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDecltype(final DecltypeContext ctx) { - final ExpressionMetadata decltypeemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata decltypeemd = metadata.getExpressionMetadata(ctx); final String name = ctx.getText(); decltypeemd.from = definition.getType(name); @@ -657,15 +726,15 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitDeclvar(final DeclvarContext ctx) { - final ExpressionMetadata declvaremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); final String name = ctx.ID().getText(); declvaremd.postConst = addVariable(ctx, name, declvaremd.to).slot; - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); if (exprctx != null) { - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = declvaremd.to; visit(exprctx); markCast(expremd); @@ -674,6 +743,43 @@ class Analyzer extends PlanAParserBaseVisitor { return null; } + @Override + public Void visitTrap(final TrapContext ctx) { + final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + + final String type = ctx.TYPE().getText(); + trapsmd.exception = definition.getType(type); + + try { + trapsmd.exception.clazz.asSubclass(Exception.class); + } catch (final ClassCastException exception) { + throw new IllegalArgumentException(error(ctx) + "Invalid exception type [" + trapsmd.exception.name + "]."); + } + + final String id = ctx.ID().getText(); + trapsmd.slot = addVariable(ctx, id, trapsmd.exception).slot; + + final BlockContext blockctx = ctx.block(); + + if (blockctx != null) { + final StatementMetadata blocksmd = metadata.createStatementMetadata(blockctx); 
+ blocksmd.lastSource = trapsmd.lastSource; + blocksmd.inLoop = trapsmd.inLoop; + blocksmd.lastLoop = trapsmd.lastLoop; + visit(blockctx); + + trapsmd.methodEscape = blocksmd.methodEscape; + trapsmd.loopEscape = blocksmd.loopEscape; + trapsmd.allLast = blocksmd.allLast; + trapsmd.anyContinue = blocksmd.anyContinue; + trapsmd.anyBreak = blocksmd.anyBreak; + } else if (ctx.emptyscope() == null) { + throw new IllegalStateException(error(ctx) + "Unexpected parser state."); + } + + return null; + } + @Override public Void visitPrecedence(final PrecedenceContext ctx) { throw new UnsupportedOperationException(error(ctx) + "Unexpected parser state."); @@ -681,7 +787,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitNumeric(final NumericContext ctx) { - final ExpressionMetadata numericemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); final boolean negate = ctx.parent instanceof UnaryContext && ((UnaryContext)ctx.parent).SUB() != null; if (ctx.DECIMAL() != null) { @@ -770,7 +876,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitChar(final CharContext ctx) { - final ExpressionMetadata charemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); if (ctx.CHAR() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -784,7 +890,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitTrue(final TrueContext ctx) { - final ExpressionMetadata trueemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); if (ctx.TRUE() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -798,7 +904,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitFalse(final FalseContext ctx) { - final ExpressionMetadata falseemd = 
adapter.getExpressionMetadata(ctx); + final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); if (ctx.FALSE() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -812,7 +918,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitNull(final NullContext ctx) { - final ExpressionMetadata nullemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); if (ctx.NULL() == null) { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -835,10 +941,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExternal(final ExternalContext ctx) { - final ExpressionMetadata extemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata extemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = extemd.read; visit(extstartctx); @@ -852,10 +958,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPostinc(final PostincContext ctx) { - final ExpressionMetadata postincemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata postincemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = postincemd.read; extstartemd.storeExpr = ctx.increment(); extstartemd.token = ADD; @@ -871,10 +977,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitPreinc(final PreincContext ctx) { - final ExpressionMetadata preincemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata preincemd = 
metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = preincemd.read; extstartemd.storeExpr = ctx.increment(); extstartemd.token = ADD; @@ -890,10 +996,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitUnary(final UnaryContext ctx) { - final ExpressionMetadata unaryemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); if (ctx.BOOLNOT() != null) { expremd.to = definition.booleanType; @@ -912,7 +1018,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (promote == null) { throw new ClassCastException("Cannot apply [" + ctx.getChild(0).getText() + "] " + - "operation to type [" + expremd.from.name + "]."); + "operation to type [" + expremd.from.name + "]."); } expremd.to = promote; @@ -981,17 +1087,17 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitCast(final CastContext ctx) { - final ExpressionMetadata castemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); final DecltypeContext decltypectx = ctx.decltype(); - final ExpressionMetadata decltypemd = adapter.createExpressionMetadata(decltypectx); + final ExpressionMetadata decltypemd = metadata.createExpressionMetadata(decltypectx); visit(decltypectx); final Type type = decltypemd.from; castemd.from = type; - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); 
- final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = type; expremd.explicit = true; visit(exprctx); @@ -1008,26 +1114,26 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBinary(final BinaryContext ctx) { - final ExpressionMetadata binaryemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); visit(exprctx0); - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); visit(exprctx1); final boolean decimal = ctx.MUL() != null || ctx.DIV() != null || ctx.REM() != null || ctx.SUB() != null; final boolean add = ctx.ADD() != null; final boolean xor = ctx.BWXOR() != null; final Type promote = add ? promoteAdd(expremd0.from, expremd1.from) : - xor ? promoteXor(expremd0.from, expremd1.from) : - promoteNumeric(expremd0.from, expremd1.from, decimal, true); + xor ? 
promoteXor(expremd0.from, expremd1.from) : + promoteNumeric(expremd0.from, expremd1.from, decimal, true); if (promote == null) { throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + - "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); } final Sort sort = promote.sort; @@ -1234,16 +1340,16 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitComp(final CompContext ctx) { - final ExpressionMetadata compemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); final boolean equality = ctx.EQ() != null || ctx.NE() != null; final boolean reference = ctx.EQR() != null || ctx.NER() != null; - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); visit(exprctx0); - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); visit(exprctx1); if (expremd0.isNull && expremd1.isNull) { @@ -1251,12 +1357,12 @@ class Analyzer extends PlanAParserBaseVisitor { } final Type promote = equality ? promoteEquality(expremd0.from, expremd1.from) : - reference ? promoteReference(expremd0.from, expremd1.from) : - promoteNumeric(expremd0.from, expremd1.from, true, true); + reference ? 
promoteReference(expremd0.from, expremd1.from) : + promoteNumeric(expremd0.from, expremd1.from, true, true); if (promote == null) { throw new ClassCastException("Cannot apply [" + ctx.getChild(1).getText() + "] " + - "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); + "operation to types [" + expremd0.from.name + "] and [" + expremd1.from.name + "]."); } expremd0.to = promote; @@ -1356,16 +1462,16 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitBool(final BoolContext ctx) { - final ExpressionMetadata boolemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); expremd0.to = definition.booleanType; visit(exprctx0); markCast(expremd0); - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); expremd1.to = definition.booleanType; visit(exprctx1); markCast(expremd1); @@ -1388,10 +1494,10 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitConditional(final ConditionalContext ctx) { - final ExpressionMetadata condemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); - final ExpressionContext exprctx0 = adapter.updateExpressionTree(ctx.expression(0)); - final ExpressionMetadata expremd0 = adapter.createExpressionMetadata(exprctx0); + final 
ExpressionContext exprctx0 = metadata.updateExpressionTree(ctx.expression(0)); + final ExpressionMetadata expremd0 = metadata.createExpressionMetadata(exprctx0); expremd0.to = definition.booleanType; visit(exprctx0); markCast(expremd0); @@ -1400,14 +1506,14 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException(error(ctx) + "Unnecessary conditional statement."); } - final ExpressionContext exprctx1 = adapter.updateExpressionTree(ctx.expression(1)); - final ExpressionMetadata expremd1 = adapter.createExpressionMetadata(exprctx1); + final ExpressionContext exprctx1 = metadata.updateExpressionTree(ctx.expression(1)); + final ExpressionMetadata expremd1 = metadata.createExpressionMetadata(exprctx1); expremd1.to = condemd.to; expremd1.explicit = condemd.explicit; visit(exprctx1); - final ExpressionContext exprctx2 = adapter.updateExpressionTree(ctx.expression(2)); - final ExpressionMetadata expremd2 = adapter.createExpressionMetadata(exprctx2); + final ExpressionContext exprctx2 = metadata.updateExpressionTree(ctx.expression(2)); + final ExpressionMetadata expremd2 = metadata.createExpressionMetadata(exprctx2); expremd2.to = condemd.to; expremd2.explicit = condemd.explicit; visit(exprctx2); @@ -1432,13 +1538,13 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitAssignment(final AssignmentContext ctx) { - final ExpressionMetadata assignemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata assignemd = metadata.getExpressionMetadata(ctx); final ExtstartContext extstartctx = ctx.extstart(); - final ExternalMetadata extstartemd = adapter.createExternalMetadata(extstartctx); + final ExternalMetadata extstartemd = metadata.createExternalMetadata(extstartctx); extstartemd.read = assignemd.read; - extstartemd.storeExpr = adapter.updateExpressionTree(ctx.expression()); + extstartemd.storeExpr = metadata.updateExpressionTree(ctx.expression()); if (ctx.AMUL() != null) { extstartemd.token = MUL; @@ -1483,22 
+1589,22 @@ class Analyzer extends PlanAParserBaseVisitor { final ExtstringContext stringctx = ctx.extstring(); if (precctx != null) { - adapter.createExtNodeMetadata(ctx, precctx); + metadata.createExtNodeMetadata(ctx, precctx); visit(precctx); } else if (castctx != null) { - adapter.createExtNodeMetadata(ctx, castctx); + metadata.createExtNodeMetadata(ctx, castctx); visit(castctx); } else if (typectx != null) { - adapter.createExtNodeMetadata(ctx, typectx); + metadata.createExtNodeMetadata(ctx, typectx); visit(typectx); } else if (varctx != null) { - adapter.createExtNodeMetadata(ctx, varctx); + metadata.createExtNodeMetadata(ctx, varctx); visit(varctx); } else if (newctx != null) { - adapter.createExtNodeMetadata(ctx, newctx); + metadata.createExtNodeMetadata(ctx, newctx); visit(newctx); } else if (stringctx != null) { - adapter.createExtNodeMetadata(ctx, stringctx); + metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { throw new IllegalStateException(); @@ -1509,9 +1615,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtprec(final ExtprecContext ctx) { - final ExtNodeMetadata precenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata precenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = precenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1528,22 +1634,22 @@ class Analyzer extends PlanAParserBaseVisitor { } if (precctx != null) { - adapter.createExtNodeMetadata(parent, precctx); + metadata.createExtNodeMetadata(parent, precctx); visit(precctx); } else if (castctx != null) { - adapter.createExtNodeMetadata(parent, castctx); + metadata.createExtNodeMetadata(parent, castctx); visit(castctx); } else if (typectx != null) { - adapter.createExtNodeMetadata(parent, typectx); + 
metadata.createExtNodeMetadata(parent, typectx); visit(typectx); } else if (varctx != null) { - adapter.createExtNodeMetadata(parent, varctx); + metadata.createExtNodeMetadata(parent, varctx); visit(varctx); } else if (newctx != null) { - adapter.createExtNodeMetadata(parent, newctx); + metadata.createExtNodeMetadata(parent, newctx); visit(newctx); } else if (stringctx != null) { - adapter.createExtNodeMetadata(ctx, stringctx); + metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); @@ -1554,12 +1660,12 @@ class Analyzer extends PlanAParserBaseVisitor { if (dotctx != null) { --parentemd.scope; - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { --parentemd.scope; - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -1568,9 +1674,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtcast(final ExtcastContext ctx) { - final ExtNodeMetadata castenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = castenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1580,29 +1686,29 @@ class Analyzer extends PlanAParserBaseVisitor { final ExtstringContext stringctx = ctx.extstring(); if (precctx != null) { - adapter.createExtNodeMetadata(parent, precctx); + metadata.createExtNodeMetadata(parent, precctx); visit(precctx); } else if (castctx != null) { - adapter.createExtNodeMetadata(parent, castctx); + metadata.createExtNodeMetadata(parent, castctx); visit(castctx); } else if (typectx != null) { - 
adapter.createExtNodeMetadata(parent, typectx); + metadata.createExtNodeMetadata(parent, typectx); visit(typectx); } else if (varctx != null) { - adapter.createExtNodeMetadata(parent, varctx); + metadata.createExtNodeMetadata(parent, varctx); visit(varctx); } else if (newctx != null) { - adapter.createExtNodeMetadata(parent, newctx); + metadata.createExtNodeMetadata(parent, newctx); visit(newctx); } else if (stringctx != null) { - adapter.createExtNodeMetadata(ctx, stringctx); + metadata.createExtNodeMetadata(ctx, stringctx); visit(stringctx); } else { throw new IllegalStateException(error(ctx) + "Unexpected parser state."); } final DecltypeContext declctx = ctx.decltype(); - final ExpressionMetadata declemd = adapter.createExpressionMetadata(declctx); + final ExpressionMetadata declemd = metadata.createExpressionMetadata(declctx); visit(declctx); castenmd.castTo = getLegalCast(ctx, parentemd.current, declemd.from, true); @@ -1615,9 +1721,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtbrace(final ExtbraceContext ctx) { - final ExtNodeMetadata braceenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = braceenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final boolean array = parentemd.current.sort == Sort.ARRAY; final boolean def = parentemd.current.sort == Sort.DEF; @@ -1643,8 +1749,8 @@ class Analyzer extends PlanAParserBaseVisitor { braceenmd.last = parentemd.scope == 0 && dotctx == null && bracectx == null; - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); + final ExpressionMetadata expremd = 
metadata.createExpressionMetadata(exprctx); if (array || def) { expremd.to = array ? definition.intType : definition.objectType; @@ -1653,15 +1759,15 @@ class Analyzer extends PlanAParserBaseVisitor { braceenmd.target = "#brace"; braceenmd.type = def ? definition.defType : - definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); + definition.getType(parentemd.current.struct, parentemd.current.type.getDimensions() - 1); analyzeLoadStoreExternal(ctx); parentemd.current = braceenmd.type; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } } else { @@ -1680,16 +1786,16 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1)) { throw new IllegalArgumentException(error(ctx) + - "Illegal map get shortcut for type [" + parentemd.current.name + "]."); + "Illegal map get shortcut for type [" + parentemd.current.name + "]."); } if (setter != null && setter.arguments.size() != 2) { throw new IllegalArgumentException(error(ctx) + - "Illegal map set shortcut for type [" + parentemd.current.name + "]."); + "Illegal map set shortcut for type [" + parentemd.current.name + "]."); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) - || !getter.rtn.equals(setter.arguments.get(1)))) { + || !getter.rtn.equals(setter.arguments.get(1)))) { throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); } @@ -1697,21 +1803,21 @@ class Analyzer extends PlanAParserBaseVisitor { settype = setter == null ? 
null : setter.arguments.get(1); } else if (list) { getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("add"); + setter = parentemd.current.struct.methods.get("set"); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.INT)) { + getter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list get shortcut for type [" + parentemd.current.name + "]."); + "Illegal list get shortcut for type [" + parentemd.current.name + "]."); } if (setter != null && (setter.arguments.size() != 2 || setter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list set shortcut for type [" + parentemd.current.name + "]."); + "Illegal list set shortcut for type [" + parentemd.current.name + "]."); } if (getter != null && setter != null && (!getter.arguments.get(0).equals(setter.arguments.get(0)) - || !getter.rtn.equals(setter.arguments.get(1)))) { + || !getter.rtn.equals(setter.arguments.get(1)))) { throw new IllegalArgumentException(error(ctx) + "Shortcut argument types must match."); } @@ -1735,7 +1841,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (braceenmd.target == null) { throw new IllegalArgumentException(error(ctx) + - "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); + "Attempting to address a non-array type [" + parentemd.current.name + "] as an array."); } return null; @@ -1743,17 +1849,17 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtdot(final ExtdotContext ctx) { - final ExtNodeMetadata dotemnd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata dotemnd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = dotemnd.parent; final ExtcallContext callctx = ctx.extcall(); final ExtfieldContext fieldctx = ctx.extfield(); if (callctx != null) { - 
adapter.createExtNodeMetadata(parent, callctx); + metadata.createExtNodeMetadata(parent, callctx); visit(callctx); } else if (fieldctx != null) { - adapter.createExtNodeMetadata(parent, fieldctx); + metadata.createExtNodeMetadata(parent, fieldctx); visit(fieldctx); } @@ -1762,9 +1868,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExttype(final ExttypeContext ctx) { - final ExtNodeMetadata typeenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata typeenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = typeenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (parentemd.current != null) { throw new IllegalArgumentException(error(ctx) + "Unexpected static type."); @@ -1776,7 +1882,7 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.statik = true; final ExtdotContext dotctx = ctx.extdot(); - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); return null; @@ -1784,9 +1890,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtcall(final ExtcallContext ctx) { - final ExtNodeMetadata callenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata callenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = callenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtdotContext dotctx = ctx.extdot(); final ExtbraceContext bracectx = ctx.extbrace(); @@ -1811,7 +1917,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (method == null && !def) { throw new IllegalArgumentException( - error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); + error(ctx) + "Unknown call [" + name + "] on type [" + struct.name + "]."); } else if (method != null) { types 
= new Type[method.arguments.size()]; method.arguments.toArray(types); @@ -1823,8 +1929,8 @@ class Analyzer extends PlanAParserBaseVisitor { if (size != types.length) { throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + - "[" + struct.name + "] expected [" + types.length + "] arguments," + - " but found [" + arguments.size() + "]."); + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); } } else { types = new Type[arguments.size()]; @@ -1837,8 +1943,8 @@ class Analyzer extends PlanAParserBaseVisitor { } for (int argument = 0; argument < size; ++argument) { - final ExpressionContext exprctx = adapter.updateExpressionTree(arguments.get(argument)); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = types[argument]; visit(exprctx); markCast(expremd); @@ -1847,10 +1953,10 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.statik = false; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -1859,9 +1965,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtvar(final ExtvarContext ctx) { - final ExtNodeMetadata varenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata varenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = varenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final String name = ctx.ID().getText(); @@ -1886,10 +1992,10 @@ class 
Analyzer extends PlanAParserBaseVisitor { parentemd.current = varenmd.type; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -1898,9 +2004,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtfield(final ExtfieldContext ctx) { - final ExtNodeMetadata memberenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = memberenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (ctx.EXTID() == null && ctx.EXTINTEGER() == null) { throw new IllegalArgumentException(error(ctx) + "Unexpected parser state."); @@ -1924,7 +2030,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException(error(ctx) + "Must read array field [length]."); } else if (store) { throw new IllegalArgumentException( - error(ctx) + "Cannot write to read-only array field [length]."); + error(ctx) + "Cannot write to read-only array field [length]."); } memberenmd.target = "#length"; @@ -1945,7 +2051,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (field != null) { if (store && java.lang.reflect.Modifier.isFinal(field.reflect.getModifiers())) { throw new IllegalArgumentException(error(ctx) + "Cannot write to read-only" + - " field [" + value + "] for type [" + struct.name + "]."); + " field [" + value + "] for type [" + struct.name + "]."); } memberenmd.target = field; @@ -1962,12 +2068,12 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || !getter.arguments.isEmpty())) { throw new IllegalArgumentException(error(ctx) + - "Illegal get shortcut on field [" + 
value + "] for type [" + struct.name + "]."); + "Illegal get shortcut on field [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 1)) { throw new IllegalArgumentException(error(ctx) + - "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); + "Illegal set shortcut on field [" + value + "] for type [" + struct.name + "]."); } Type settype = setter == null ? null : setter.arguments.get(0); @@ -1983,13 +2089,13 @@ class Analyzer extends PlanAParserBaseVisitor { if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || getter.arguments.get(0).sort != Sort.STRING)) { throw new IllegalArgumentException(error(ctx) + - "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal map get shortcut [" + value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.STRING)) { + setter.arguments.get(0).sort != Sort.STRING)) { throw new IllegalArgumentException(error(ctx) + - "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal map set shortcut [" + value + "] for type [" + struct.name + "]."); } if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { @@ -2006,18 +2112,18 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current.clazz.asSubclass(List.class); getter = parentemd.current.struct.methods.get("get"); - setter = parentemd.current.struct.methods.get("add"); + setter = parentemd.current.struct.methods.get("set"); if (getter != null && (getter.rtn.sort == Sort.VOID || getter.arguments.size() != 1 || - getter.arguments.get(0).sort != Sort.INT)) { + getter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list get shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal list get shortcut [" + 
value + "] for type [" + struct.name + "]."); } if (setter != null && (setter.rtn.sort != Sort.VOID || setter.arguments.size() != 2 || - setter.arguments.get(0).sort != Sort.INT)) { + setter.arguments.get(0).sort != Sort.INT)) { throw new IllegalArgumentException(error(ctx) + - "Illegal list add shortcut [" + value + "] for type [" + struct.name + "]."); + "Illegal list set shortcut [" + value + "] for type [" + struct.name + "]."); } if (getter != null && setter != null && !getter.rtn.equals(setter.arguments.get(1))) { @@ -2030,7 +2136,7 @@ class Analyzer extends PlanAParserBaseVisitor { constant = Integer.parseInt(value); } catch (NumberFormatException exception) { throw new IllegalArgumentException(error(ctx) + - "Illegal list shortcut value [" + value + "]."); + "Illegal list shortcut value [" + value + "]."); } } catch (ClassCastException exception) { //Do nothing. @@ -2050,17 +2156,17 @@ class Analyzer extends PlanAParserBaseVisitor { if (memberenmd.target == null) { throw new IllegalArgumentException( - error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); + error(ctx) + "Unknown field [" + value + "] for type [" + struct.name + "]."); } } parentemd.statik = false; if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -2069,9 +2175,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtnew(ExtnewContext ctx) { - final ExtNodeMetadata newenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata newenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = newenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final ExtdotContext dotctx = 
ctx.extdot(); final ExtbraceContext bracectx = ctx.extbrace(); @@ -2129,7 +2235,7 @@ class Analyzer extends PlanAParserBaseVisitor { parentemd.current = newenmd.type; } else { throw new IllegalArgumentException( - error(ctx) + "Unknown new call on type [" + struct.name + "]."); + error(ctx) + "Unknown new call on type [" + struct.name + "]."); } } else { throw new IllegalArgumentException(error(ctx) + "Unknown parser state."); @@ -2137,23 +2243,23 @@ class Analyzer extends PlanAParserBaseVisitor { if (size != types.length) { throw new IllegalArgumentException(error(ctx) + "When calling [" + name + "] on type " + - "[" + struct.name + "] expected [" + types.length + "] arguments," + - " but found [" + arguments.size() + "]."); + "[" + struct.name + "] expected [" + types.length + "] arguments," + + " but found [" + arguments.size() + "]."); } for (int argument = 0; argument < size; ++argument) { - final ExpressionContext exprctx = adapter.updateExpressionTree(arguments.get(argument)); - final ExpressionMetadata expremd = adapter.createExpressionMetadata(exprctx); + final ExpressionContext exprctx = metadata.updateExpressionTree(arguments.get(argument)); + final ExpressionMetadata expremd = metadata.createExpressionMetadata(exprctx); expremd.to = types[argument]; visit(exprctx); markCast(expremd); } if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -2162,9 +2268,9 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitExtstring(final ExtstringContext ctx) { - final ExtNodeMetadata memberenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata memberenmd = metadata.getExtNodeMetadata(ctx); final ParserRuleContext parent = memberenmd.parent; - final ExternalMetadata parentemd = 
adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); final String string = ctx.STRING().getText(); @@ -2182,7 +2288,7 @@ class Analyzer extends PlanAParserBaseVisitor { throw new IllegalArgumentException(error(ctx) + "Must read String constant [" + string + "]."); } else if (store) { throw new IllegalArgumentException( - error(ctx) + "Cannot write to read-only String constant [" + string + "]."); + error(ctx) + "Cannot write to read-only String constant [" + string + "]."); } memberenmd.target = string; @@ -2194,10 +2300,10 @@ class Analyzer extends PlanAParserBaseVisitor { } if (dotctx != null) { - adapter.createExtNodeMetadata(parent, dotctx); + metadata.createExtNodeMetadata(parent, dotctx); visit(dotctx); } else if (bracectx != null) { - adapter.createExtNodeMetadata(parent, bracectx); + metadata.createExtNodeMetadata(parent, bracectx); visit(bracectx); } @@ -2211,7 +2317,7 @@ class Analyzer extends PlanAParserBaseVisitor { @Override public Void visitIncrement(IncrementContext ctx) { - final ExpressionMetadata incremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); final Sort sort = incremd.to == null ? 
null : incremd.to.sort; final boolean positive = ctx.INCR() != null; @@ -2242,13 +2348,13 @@ class Analyzer extends PlanAParserBaseVisitor { } private void analyzeLoadStoreExternal(final ParserRuleContext source) { - final ExtNodeMetadata extenmd = adapter.getExtNodeMetadata(source); + final ExtNodeMetadata extenmd = metadata.getExtNodeMetadata(source); final ParserRuleContext parent = extenmd.parent; - final ExternalMetadata parentemd = adapter.getExternalMetadata(parent); + final ExternalMetadata parentemd = metadata.getExternalMetadata(parent); if (extenmd.last && parentemd.storeExpr != null) { final ParserRuleContext store = parentemd.storeExpr; - final ExpressionMetadata storeemd = adapter.createExpressionMetadata(parentemd.storeExpr); + final ExpressionMetadata storeemd = metadata.createExpressionMetadata(parentemd.storeExpr); final int token = parentemd.token; if (token > 0) { @@ -2259,12 +2365,12 @@ class Analyzer extends PlanAParserBaseVisitor { final boolean decimal = token == MUL || token == DIV || token == REM || token == SUB; extenmd.promote = add ? promoteAdd(extenmd.type, storeemd.from) : - xor ? promoteXor(extenmd.type, storeemd.from) : - promoteNumeric(extenmd.type, storeemd.from, decimal, true); + xor ? 
promoteXor(extenmd.type, storeemd.from) : + promoteNumeric(extenmd.type, storeemd.from, decimal, true); if (extenmd.promote == null) { throw new IllegalArgumentException("Cannot apply compound assignment to " + - " types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); + "types [" + extenmd.type.name + "] and [" + storeemd.from.name + "]."); } extenmd.castFrom = getLegalCast(source, extenmd.type, extenmd.promote, false); @@ -2722,7 +2828,7 @@ class Analyzer extends PlanAParserBaseVisitor { from.clazz.asSubclass(to.clazz); return cast; - } catch (ClassCastException cce0) { + } catch (final ClassCastException cce0) { try { if (explicit) { to.clazz.asSubclass(from.clazz); @@ -2730,11 +2836,11 @@ class Analyzer extends PlanAParserBaseVisitor { return cast; } else { throw new ClassCastException( - error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); - } - } catch (ClassCastException cce1) { - throw new ClassCastException( error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); + } + } catch (final ClassCastException cce1) { + throw new ClassCastException( + error(source) + "Cannot cast from [" + from.name + "] to [" + to.name + "]."); } } } @@ -2744,7 +2850,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (transform == null) { throw new ClassCastException( - error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); + error(source) + "Cannot cast from [" + cast.from.name + "] to [" + cast.to.name + "]."); } return transform; @@ -2782,8 +2888,8 @@ class Analyzer extends PlanAParserBaseVisitor { } } else { throw new IllegalStateException(error(source) + "No valid constant cast from " + - "[" + cast.from.clazz.getCanonicalName() + "] to " + - "[" + cast.to.clazz.getCanonicalName() + "]."); + "[" + cast.from.clazz.getCanonicalName() + "] to " + + "[" + cast.to.clazz.getCanonicalName() + "]."); } } } @@ -2800,10 +2906,10 @@ class Analyzer extends PlanAParserBaseVisitor { 
return jmethod.invoke(object); } } catch (IllegalAccessException | IllegalArgumentException | - java.lang.reflect.InvocationTargetException | NullPointerException | - ExceptionInInitializerError exception) { + java.lang.reflect.InvocationTargetException | NullPointerException | + ExceptionInInitializerError exception) { throw new IllegalStateException(error(source) + "Unable to invoke transform to cast constant from " + - "[" + transform.from.name + "] to [" + transform.to.name + "]."); + "[" + transform.from.name + "] to [" + transform.to.name + "]."); } } @@ -2813,7 +2919,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (sort == Sort.DEF) { return definition.defType; } else if ((sort == Sort.DOUBLE || sort == Sort.DOUBLE_OBJ || sort == Sort.NUMBER) && decimal) { - return primitive ? definition.doubleType : definition.doubleobjType; + return primitive ? definition.doubleType : definition.doubleobjType; } else if ((sort == Sort.FLOAT || sort == Sort.FLOAT_OBJ) && decimal) { return primitive ? definition.floatType : definition.floatobjType; } else if (sort == Sort.LONG || sort == Sort.LONG_OBJ || sort == Sort.NUMBER) { @@ -2835,7 +2941,7 @@ class Analyzer extends PlanAParserBaseVisitor { if (decimal) { if (sort0 == Sort.DOUBLE || sort0 == Sort.DOUBLE_OBJ || sort0 == Sort.NUMBER || - sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { + sort1 == Sort.DOUBLE || sort1 == Sort.DOUBLE_OBJ || sort1 == Sort.NUMBER) { return primitive ? definition.doubleType : definition.doubleobjType; } else if (sort0 == Sort.FLOAT || sort0 == Sort.FLOAT_OBJ || sort1 == Sort.FLOAT || sort1 == Sort.FLOAT_OBJ) { return primitive ? definition.floatType : definition.floatobjType; @@ -2843,8 +2949,8 @@ class Analyzer extends PlanAParserBaseVisitor { } if (sort0 == Sort.LONG || sort0 == Sort.LONG_OBJ || sort0 == Sort.NUMBER || - sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { - return primitive ? 
definition.longType : definition.longobjType; + sort1 == Sort.LONG || sort1 == Sort.LONG_OBJ || sort1 == Sort.NUMBER) { + return primitive ? definition.longType : definition.longobjType; } else if (sort0.numeric && sort1.numeric) { return primitive ? definition.intType : definition.intobjType; } @@ -2885,7 +2991,7 @@ class Analyzer extends PlanAParserBaseVisitor { final boolean primitive = sort0.primitive && sort1.primitive; if (sort0.bool && sort1.bool) { - return primitive ? definition.booleanType : definition.byteobjType; + return primitive ? definition.booleanType : definition.booleanobjType; } if (sort0.numeric && sort1.numeric) { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java index 4d6936a0d72..b6aa5f075ab 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Compiler.java @@ -30,88 +30,87 @@ import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; +/** + * The Compiler is the entry point for generating a Plan A script. The compiler will generate an ANTLR + * parse tree based on the source code that is passed in. Two passes will then be run over the parse tree, + * one for analysis using the {@link Analyzer} and another to generate the actual byte code using ASM in + * the {@link Writer}. + */ final class Compiler { + /** + * The default language API to be used with Plan A. The second construction is used + * to finalize all the variables, so there is no mistake of modification afterwards. + */ private static Definition DEFAULT_DEFINITION = new Definition(new Definition()); - /** we define the class with lowest privileges */ + /** + * Define the class with lowest privileges. + */ private static final CodeSource CODESOURCE; + /** + * Setup the code privileges. 
+ */ static { try { + // Setup the code privileges. CODESOURCE = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); } catch (MalformedURLException impossible) { throw new RuntimeException(impossible); } } + /** + * A secure class loader used to define Plan A scripts. + */ static class Loader extends SecureClassLoader { - Loader(ClassLoader parent) { + /** + * @param parent The parent ClassLoader. + */ + Loader(final ClassLoader parent) { super(parent); } - Class define(String name, byte[] bytes) { + /** + * Generates a Class object from the generated byte code. + * @param name The name of the class. + * @param bytes The generated byte code. + * @return A Class object extending {@link Executable}. + */ + Class define(final String name, final byte[] bytes) { return defineClass(name, bytes, 0, bytes.length, CODESOURCE).asSubclass(Executable.class); } } - static Executable compile(Loader loader, final String name, final String source, final Definition custom, CompilerSettings settings) { - long start = System.currentTimeMillis(); - - final Definition definition = custom == null ? 
DEFAULT_DEFINITION : new Definition(custom); - - //long end = System.currentTimeMillis() - start; - //System.out.println("types: " + end); - //start = System.currentTimeMillis(); - - //final ParserRuleContext root = createParseTree(source, types); - final ANTLRInputStream stream = new ANTLRInputStream(source); - final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); - final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); - final ParserErrorStrategy strategy = new ParserErrorStrategy(); - - lexer.removeErrorListeners(); - lexer.setTypes(definition.structs.keySet()); - - //List tokens = lexer.getAllTokens(); - - //for (final Token token : tokens) { - // System.out.println(token.getType() + " " + token.getText()); - //} - - parser.removeErrorListeners(); - parser.setErrorHandler(strategy); - - ParserRuleContext root = parser.source(); - - //end = System.currentTimeMillis() - start; - //System.out.println("tree: " + end); - - final Adapter adapter = new Adapter(definition, source, root, settings); - - start = System.currentTimeMillis(); - - Analyzer.analyze(adapter); - //System.out.println(root.toStringTree(parser)); - - //end = System.currentTimeMillis() - start; - //System.out.println("analyze: " + end); - //start = System.currentTimeMillis(); - - final byte[] bytes = Writer.write(adapter); - - //end = System.currentTimeMillis() - start; - //System.out.println("write: " + end); - //start = System.currentTimeMillis(); - + /** + * Runs the two-pass compiler to generate a Plan A script. + * @param loader The ClassLoader used to define the script. + * @param name The name of the script. + * @param source The source code for the script. + * @param settings The CompilerSettings to be used during the compilation. + * @return An {@link Executable} Plan A script. 
+ */ + static Executable compile(final Loader loader, final String name, final String source, + final Definition custom, final CompilerSettings settings) { + final Definition definition = custom != null ? new Definition(custom) : DEFAULT_DEFINITION; + final ParserRuleContext root = createParseTree(source, definition); + final Metadata metadata = new Metadata(definition, source, root, settings); + Analyzer.analyze(metadata); + final byte[] bytes = Writer.write(metadata); final Executable executable = createExecutable(loader, definition, name, source, bytes); - //end = System.currentTimeMillis() - start; - //System.out.println("create: " + end); - return executable; } - private static ParserRuleContext createParseTree(String source, Definition definition) { + /** + * Generates the ANTLR tree from the given source code. Several methods below, are used + * to ensure that the first error generated by ANTLR will cause the compilation to fail rather than + * use ANTLR's recovery strategies that may be potentially dangerous. + * @param source The source code for the script. + * @param definition The Plan A API. + * @return The root node for the ANTLR parse tree. + */ + private static ParserRuleContext createParseTree(final String source, final Definition definition) { final ANTLRInputStream stream = new ANTLRInputStream(source); final ErrorHandlingLexer lexer = new ErrorHandlingLexer(stream); final PlanAParser parser = new PlanAParser(new CommonTokenStream(lexer)); @@ -119,36 +118,50 @@ final class Compiler { lexer.removeErrorListeners(); lexer.setTypes(definition.structs.keySet()); - parser.removeErrorListeners(); parser.setErrorHandler(strategy); ParserRuleContext root = parser.source(); - // System.out.println(root.toStringTree(parser)); + return root; } - private static Executable createExecutable(Loader loader, Definition definition, String name, String source, byte[] bytes) { + /** + * Generates an {@link Executable} that can run a Plan A script. 
+ * @param loader The {@link Loader} to define the script's class file. + * @param definition The Plan A API. + * @param name The name of the script. + * @param source The source text of the script. + * @param bytes The ASM generated byte code to define the class with. + * @return A Plan A {@link Executable} script. + */ + private static Executable createExecutable(final Loader loader, final Definition definition, + final String name, final String source, final byte[] bytes) { try { - // for debugging: - //try { - // FileOutputStream f = new FileOutputStream(new File("/Users/jdconrad/lang/generated/out.class"), false); - // f.write(bytes); - // f.close(); - //} catch (Exception e) { - // throw new RuntimeException(e); - //} + // Used for debugging. Uncomment this code and add -Dtests.security.manager=false when running to save + // the generated Java class files. The javap tool can then be used to inspect the generated byte code. + + // try { + // FileOutputStream f = new FileOutputStream(new File(""), false); + // f.write(bytes); + // f.close(); + // } catch (Exception e) { + // throw new RuntimeException(e); + // } final Class clazz = loader.define(Writer.CLASS_NAME, bytes); final java.lang.reflect.Constructor constructor = clazz.getConstructor(Definition.class, String.class, String.class); return constructor.newInstance(definition, name, source); - } catch (Exception exception) { + } catch (final Exception exception) { // Catch everything to let the user know this is something caused internally. throw new IllegalStateException( "An internal error occurred attempting to define the script [" + name + "].", exception); } } + /** + * All methods in the compiler should be static. 
+ */ private Compiler() {} } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java index f66b65d0612..4b5e753e342 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/CompilerSettings.java @@ -19,13 +19,30 @@ package org.elasticsearch.plan.a; -/** - * Settings to use when compiling a script +/** + * Settings to use when compiling a script. */ final class CompilerSettings { + /** + * Constant to be used when specifying numeric overflow when compiling a script. + */ + public static final String NUMERIC_OVERFLOW = "numeric_overflow"; + /** + * Constant to be used when specifying the maximum loop counter when compiling a script. + */ + public static final String MAX_LOOP_COUNTER = "max_loop_counter"; + + /** + * Whether or not to allow numeric values to overflow without exception. + */ private boolean numericOverflow = true; + /** + * The maximum number of statements allowed to be run in a loop. + */ + private int maxLoopCounter = 10000; + /** * Returns {@code true} if numeric operations should overflow, {@code false} * if they should signal an exception. @@ -46,4 +63,20 @@ final class CompilerSettings { public void setNumericOverflow(boolean allow) { this.numericOverflow = allow; } + + /** + * Returns the value for the cumulative total number of statements that can be made in all loops + * in a script before an exception is thrown. This attempts to prevent infinite loops. + */ + public int getMaxLoopCounter() { + return maxLoopCounter; + } + + /** + * Set the cumulative total number of statements that can be made in all loops. 
+ * @see #getMaxLoopCounter + */ + public void setMaxLoopCounter(int max) { + this.maxLoopCounter = max; + } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java index c7a8ce410fd..eb5e0bdd66f 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Def.java @@ -107,7 +107,7 @@ public class Def { } else if (owner instanceof List) { try { final int index = Integer.parseInt(name); - ((List)owner).add(index, value); + ((List)owner).set(index, value); } catch (NumberFormatException exception) { throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "]."); } @@ -198,7 +198,7 @@ public class Def { "in array class [" + array.getClass().getCanonicalName() + "].", throwable); } } else if (array instanceof List) { - ((List)array).add((int)index, value); + ((List)array).set((int)index, value); } else { throw new IllegalArgumentException("Attempting to address a non-array type " + "[" + array.getClass().getCanonicalName() + "] as an array."); diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java index 5c52a202919..613bdafd8ee 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Definition.java @@ -24,10 +24,14 @@ import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; class Definition { enum Sort { @@ -351,16 +355,32 @@ class Definition { final Type utilityType; final Type defobjType; + 
final Type itrType; + final Type oitrType; + final Type sitrType; + + final Type collectionType; + final Type ocollectionType; + final Type scollectionType; + final Type listType; final Type arraylistType; - final Type mapType; - final Type hashmapType; - final Type olistType; final Type oarraylistType; - final Type omapType; - final Type ohashmapType; + final Type slistType; + final Type sarraylistType; + final Type setType; + final Type hashsetType; + final Type osetType; + final Type ohashsetType; + final Type ssetType; + final Type shashsetType; + + final Type mapType; + final Type hashmapType; + final Type oomapType; + final Type oohashmapType; final Type smapType; final Type shashmapType; final Type somapType; @@ -368,6 +388,12 @@ class Definition { final Type execType; + final Type exceptionType; + final Type arithexcepType; + final Type iargexcepType; + final Type istateexceptType; + final Type nfexcepType; + public Definition() { structs = new HashMap<>(); classes = new HashMap<>(); @@ -406,16 +432,32 @@ class Definition { utilityType = getType("Utility"); defobjType = getType("Def"); + itrType = getType("Iterator"); + oitrType = getType("Iterator"); + sitrType = getType("Iterator"); + + collectionType = getType("Collection"); + ocollectionType = getType("Collection"); + scollectionType = getType("Collection"); + listType = getType("List"); arraylistType = getType("ArrayList"); - mapType = getType("Map"); - hashmapType = getType("HashMap"); - olistType = getType("List"); oarraylistType = getType("ArrayList"); - omapType = getType("Map"); - ohashmapType = getType("HashMap"); + slistType = getType("List"); + sarraylistType = getType("ArrayList"); + setType = getType("Set"); + hashsetType = getType("HashSet"); + osetType = getType("Set"); + ohashsetType = getType("HashSet"); + ssetType = getType("Set"); + shashsetType = getType("HashSet"); + + mapType = getType("Map"); + hashmapType = getType("HashMap"); + oomapType = getType("Map"); + oohashmapType = 
getType("HashMap"); smapType = getType("Map"); shashmapType = getType("HashMap"); somapType = getType("Map"); @@ -423,6 +465,12 @@ class Definition { execType = getType("Executable"); + exceptionType = getType("Exception"); + arithexcepType = getType("ArithmeticException"); + iargexcepType = getType("IllegalArgumentException"); + istateexceptType = getType("IllegalStateException"); + nfexcepType = getType("NumberFormatException"); + addDefaultElements(); copyDefaultStructs(); addDefaultTransforms(); @@ -478,22 +526,44 @@ class Definition { utilityType = definition.utilityType; defobjType = definition.defobjType; + itrType = definition.itrType; + oitrType = definition.oitrType; + sitrType = definition.sitrType; + + collectionType = definition.collectionType; + ocollectionType = definition.ocollectionType; + scollectionType = definition.scollectionType; + listType = definition.listType; arraylistType = definition.arraylistType; - mapType = definition.mapType; - hashmapType = definition.hashmapType; - olistType = definition.olistType; oarraylistType = definition.oarraylistType; - omapType = definition.omapType; - ohashmapType = definition.ohashmapType; + slistType = definition.slistType; + sarraylistType = definition.sarraylistType; + setType = definition.setType; + hashsetType = definition.hashsetType; + osetType = definition.osetType; + ohashsetType = definition.ohashsetType; + ssetType = definition.ssetType; + shashsetType = definition.shashsetType; + + mapType = definition.mapType; + hashmapType = definition.hashmapType; + oomapType = definition.oomapType; + oohashmapType = definition.oohashmapType; smapType = definition.smapType; shashmapType = definition.shashmapType; somapType = definition.somapType; sohashmapType = definition.sohashmapType; execType = definition.execType; + + exceptionType = definition.exceptionType; + arithexcepType = definition.arithexcepType; + iargexcepType = definition.iargexcepType; + istateexceptType = definition.istateexceptType; + 
nfexcepType = definition.nfexcepType; } private void addDefaultStructs() { @@ -526,22 +596,44 @@ class Definition { addStruct( "Utility" , Utility.class ); addStruct( "Def" , Def.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); + addStruct( "Iterator" , Iterator.class ); + addStruct( "Iterator" , Iterator.class ); + addStruct( "Iterator" , Iterator.class ); - addStruct( "List" , List.class ); - addStruct( "ArrayList" , ArrayList.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); + addStruct( "Collection" , Collection.class ); + addStruct( "Collection" , Collection.class ); + addStruct( "Collection" , Collection.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); - addStruct( "Map" , Map.class ); - addStruct( "HashMap" , HashMap.class ); + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + addStruct( "List" , List.class ); + addStruct( "ArrayList" , ArrayList.class ); + + addStruct( "Set" , Set.class ); + addStruct( "HashSet" , HashSet.class ); + addStruct( "Set" , Set.class ); + addStruct( "HashSet" , HashSet.class ); + addStruct( "Set" , Set.class ); + addStruct( "HashSet" , HashSet.class ); + + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); + addStruct( "Map" , Map.class ); + addStruct( "HashMap" , HashMap.class ); addStruct( "Executable" , Executable.class ); + + addStruct( "Exception" , Exception.class); + addStruct( "ArithmeticException" , ArithmeticException.class); + addStruct( "IllegalArgumentException" , IllegalArgumentException.class); + addStruct( "IllegalStateException" , 
IllegalStateException.class); + addStruct( "NumberFormatException" , NumberFormatException.class); } private void addDefaultClasses() { @@ -568,10 +660,16 @@ class Definition { addClass("CharSequence"); addClass("String"); + addClass("Iterator"); + addClass("Collection"); addClass("List"); addClass("ArrayList"); + addClass("Set"); + addClass("HashSet"); addClass("Map"); addClass("HashMap"); + + addClass("Exception"); } private void addDefaultElements() { @@ -587,45 +685,39 @@ class Definition { addMethod("Boolean", "valueOf", null, true, booleanobjType, new Type[] {booleanType}, null, null); addMethod("Boolean", "booleanValue", null, false, booleanType, new Type[] {}, null, null); - addConstructor("Byte", "new", new Type[]{byteType}, null); + addConstructor("Byte", "new", new Type[] {byteType}, null); addMethod("Byte", "valueOf", null, true, byteobjType, new Type[] {byteType}, null, null); - addMethod("Byte", "byteValue", null, false, byteType, new Type[] {}, null, null); addField("Byte", "MIN_VALUE", null, true, byteType, null); addField("Byte", "MAX_VALUE", null, true, byteType, null); - addConstructor("Short", "new", new Type[]{shortType}, null); + addConstructor("Short", "new", new Type[] {shortType}, null); addMethod("Short", "valueOf", null, true, shortobjType, new Type[] {shortType}, null, null); - addMethod("Short", "shortValue", null, false, shortType, new Type[] {}, null, null); addField("Short", "MIN_VALUE", null, true, shortType, null); addField("Short", "MAX_VALUE", null, true, shortType, null); - addConstructor("Character", "new", new Type[]{charType}, null); + addConstructor("Character", "new", new Type[] {charType}, null); addMethod("Character", "valueOf", null, true, charobjType, new Type[] {charType}, null, null); addMethod("Character", "charValue", null, false, charType, new Type[] {}, null, null); addField("Character", "MIN_VALUE", null, true, charType, null); addField("Character", "MAX_VALUE", null, true, charType, null); - 
addConstructor("Integer", "new", new Type[]{intType}, null); + addConstructor("Integer", "new", new Type[] {intType}, null); addMethod("Integer", "valueOf", null, true, intobjType, new Type[] {intType}, null, null); - addMethod("Integer", "intValue", null, false, intType, new Type[] {}, null, null); addField("Integer", "MIN_VALUE", null, true, intType, null); addField("Integer", "MAX_VALUE", null, true, intType, null); - addConstructor("Long", "new", new Type[]{longType}, null); + addConstructor("Long", "new", new Type[] {longType}, null); addMethod("Long", "valueOf", null, true, longobjType, new Type[] {longType}, null, null); - addMethod("Long", "longValue", null, false, longType, new Type[] {}, null, null); addField("Long", "MIN_VALUE", null, true, longType, null); addField("Long", "MAX_VALUE", null, true, longType, null); - addConstructor("Float", "new", new Type[]{floatType}, null); + addConstructor("Float", "new", new Type[] {floatType}, null); addMethod("Float", "valueOf", null, true, floatobjType, new Type[] {floatType}, null, null); - addMethod("Float", "floatValue", null, false, floatType, new Type[] {}, null, null); addField("Float", "MIN_VALUE", null, true, floatType, null); addField("Float", "MAX_VALUE", null, true, floatType, null); - addConstructor("Double", "new", new Type[]{doubleType}, null); + addConstructor("Double", "new", new Type[] {doubleType}, null); addMethod("Double", "valueOf", null, true, doubleobjType, new Type[] {doubleType}, null, null); - addMethod("Double", "doubleValue", null, false, doubleType, new Type[] {}, null, null); addField("Double", "MIN_VALUE", null, true, doubleType, null); addField("Double", "MAX_VALUE", null, true, doubleType, null); @@ -760,7 +852,44 @@ class Definition { addMethod("Utility", "DoubleToboolean", null, true, booleanType, new Type[] {doubleobjType}, null, null); addMethod("Utility", "DoubleTochar", null, true, charType, new Type[] {doubleobjType}, null, null); - addMethod("Math", "dmax", "max", true, 
doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "abs", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "fabs", "abs", true, floatType, new Type[] {floatType}, null, null); + addMethod("Math", "labs", "abs", true, longType, new Type[] {longType}, null, null); + addMethod("Math", "iabs", "abs", true, intType, new Type[] {intType}, null, null); + addMethod("Math", "acos", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "asin", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "atan", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "atan2", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "cbrt", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "ceil", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "cos", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "cosh", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "exp", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "expm1", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "floor", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "hypot", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "log", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "log10", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "log1p", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "max", "max", true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "fmax", "max", true, floatType, new Type[] {floatType, floatType}, null, null); + 
addMethod("Math", "lmax", "max", true, longType, new Type[] {longType, longType}, null, null); + addMethod("Math", "imax", "max", true, intType, new Type[] {intType, intType}, null, null); + addMethod("Math", "min", "min", true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "fmin", "min", true, floatType, new Type[] {floatType, floatType}, null, null); + addMethod("Math", "lmin", "min", true, longType, new Type[] {longType, longType}, null, null); + addMethod("Math", "imin", "min", true, intType, new Type[] {intType, intType}, null, null); + addMethod("Math", "pow", null, true, doubleType, new Type[] {doubleType, doubleType}, null, null); + addMethod("Math", "random", null, true, doubleType, new Type[] {}, null, null); + addMethod("Math", "rint", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "round", null, true, longType, new Type[] {doubleType}, null, null); + addMethod("Math", "sin", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "sinh", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "sqrt", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "tan", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "tanh", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "toDegrees", null, true, doubleType, new Type[] {doubleType}, null, null); + addMethod("Math", "toRadians", null, true, doubleType, new Type[] {doubleType}, null, null); addMethod("Def", "DefToboolean", null, true, booleanType, new Type[] {defType}, null, null); addMethod("Def", "DefTobyte", null, true, byteType, new Type[] {defType}, null, null); @@ -778,56 +907,124 @@ class Definition { addMethod("Def", "DefToLong", null, true, longobjType, new Type[] {defType}, null, null); addMethod("Def", "DefToFloat", null, true, floatobjType, new Type[] {defType}, null, null); 
addMethod("Def", "DefToDouble", null, true, doubleobjType, new Type[] {defType}, null, null); - - addMethod("List", "addLast", "add", false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); - addMethod("List", "add", null, false, voidType, new Type[] {intType, objectType}, null, new Type[] {intType, defType}); + + addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); + addMethod("Iterator", "next", null, false, objectType, new Type[] {}, defType, null); + addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); + + addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); + addMethod("Iterator", "next", null, false, objectType, new Type[] {}, null, null); + addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); + + addMethod("Iterator", "hasNext", null, false, booleanType, new Type[] {}, null, null); + addMethod("Iterator", "next", null, false, objectType, new Type[] {}, stringType, null); + addMethod("Iterator", "remove", null, false, voidType, new Type[] {}, null, null); + + addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); + addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Collection", "iterator", null, false, itrType, new Type[] {}, null, null); + addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); + + addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, 
null); + addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Collection", "iterator", null, false, oitrType, new Type[] {}, null, null); + addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); + + addMethod("Collection", "add", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Collection", "clear", null, false, voidType, new Type[] {}, null, null); + addMethod("Collection", "contains", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Collection", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Collection", "iterator", null, false, sitrType, new Type[] {}, null, null); + addMethod("Collection", "remove", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Collection", "size", null, false, intType, new Type[] {}, null, null); + + addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, defType, new Type[] {intType, defType}); addMethod("List", "get", null, false, objectType, new Type[] {intType}, defType, null); addMethod("List", "remove", null, false, objectType, new Type[] {intType}, defType, null); - addMethod("List", "size", null, false, intType, new Type[] {}, null, null); - addMethod("List", "isEmpty", null, false, booleanType, new Type[] {}, null, null); addConstructor("ArrayList", "new", new Type[] {}, null); + addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, null, null); + addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null); + addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null); + + 
addConstructor("ArrayList", "new", new Type[] {}, null); + + addMethod("List", "set", null, false, objectType, new Type[] {intType, objectType}, stringType, new Type[] {intType, stringType}); + addMethod("List", "get", null, false, objectType, new Type[] {intType}, stringType, null); + addMethod("List", "remove", null, false, objectType, new Type[] {intType}, stringType, null); + + addConstructor("ArrayList", "new", new Type[] {}, null); + + addConstructor("HashSet", "new", new Type[] {}, null); + + addConstructor("HashSet", "new", new Type[] {}, null); + + addConstructor("HashSet", "new", new Type[] {}, null); + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {defType, defType}); addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {defType}); addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, setType, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null); addConstructor("HashMap", "new", new Type[] {}, null); - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {stringType, defType}); - addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); - addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] 
{stringType}); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); - addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - - addConstructor("HashMap", "new", new Type[] {}, null); - - addMethod("List", "addLast", "add", false, booleanType, new Type[] {objectType}, null, null); - addMethod("List", "add", null, false, voidType, new Type[] {intType, objectType}, null, null); - addMethod("List", "get", null, false, objectType, new Type[] {intType}, null, null); - addMethod("List", "remove", null, false, objectType, new Type[] {intType}, null, null); - addMethod("List", "size", null, false, intType, new Type[] {}, null, null); - addMethod("List", "isEmpty", null, false, booleanType, new Type[] {}, null, null); - - addConstructor("ArrayList", "new", new Type[] {}, null); - addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, null); addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, null); addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, null); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, null, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null); addConstructor("HashMap", "new", new Type[] {}, null); + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, defType, new Type[] {stringType, defType}); + addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, defType, new Type[] 
{stringType}); + addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, defType, new Type[] {stringType}); + addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, new Type[] {defType}); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, collectionType, null); + + addConstructor("HashMap", "new", new Type[] {}, null); + addMethod("Map", "put", null, false, objectType, new Type[] {objectType, objectType}, null, new Type[] {stringType, objectType}); addMethod("Map", "get", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); addMethod("Map", "remove", null, false, objectType, new Type[] {objectType}, null, new Type[] {stringType}); - addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); addMethod("Map", "isEmpty", null, false, booleanType, new Type[] {}, null, null); + addMethod("Map", "size", null, false, intType, new Type[] {}, null, null); + addMethod("Map", "containsKey", null, false, booleanType, new Type[] {objectType}, null, new Type[] {stringType}); + addMethod("Map", "containsValue", null, false, booleanType, new Type[] {objectType}, null, null); + addMethod("Map", "keySet", null, false, osetType, new Type[] {}, ssetType, null); + addMethod("Map", "values", null, false, ocollectionType, new Type[] {}, null, null); addConstructor("HashMap", "new", new Type[] {}, null); + + addMethod("Exception", "getMessage", null, false, stringType, new Type[] {}, null, null); + + addConstructor("ArithmeticException", "new", new Type[] {stringType}, null); + + 
addConstructor("IllegalArgumentException", "new", new Type[] {stringType}, null); + + addConstructor("IllegalStateException", "new", new Type[] {stringType}, null); + + addConstructor("NumberFormatException", "new", new Type[] {stringType}, null); } private void copyDefaultStructs() { @@ -845,21 +1042,36 @@ class Definition { copyStruct("CharSequence", "Object"); copyStruct("String", "CharSequence", "Object"); - copyStruct("List", "Object"); - copyStruct("ArrayList", "List", "Object"); + copyStruct("List", "Collection", "Object"); + copyStruct("ArrayList", "List", "Collection", "Object"); + copyStruct("List", "Collection", "Object"); + copyStruct("ArrayList", "List", "Collection", "Object"); + copyStruct("List", "Collection", "Object"); + copyStruct("ArrayList", "List", "Collection", "Object"); + + copyStruct("Set", "Collection", "Object"); + copyStruct("HashSet", "Set", "Collection", "Object"); + copyStruct("Set", "Collection", "Object"); + copyStruct("HashSet", "Set", "Collection", "Object"); + copyStruct("Set", "Collection", "Object"); + copyStruct("HashSet", "Set", "Collection", "Object"); + copyStruct("Map", "Object"); copyStruct("HashMap", "Map", "Object"); - copyStruct("Map", "Object"); - copyStruct("HashMap", "Map", "Object"); - - copyStruct("List", "Object"); - copyStruct("ArrayList", "List", "Object"); copyStruct("Map", "Object"); copyStruct("HashMap", "Map", "Object"); + copyStruct("Map", "Object"); + copyStruct("HashMap", "Map", "Object"); copyStruct("Map", "Object"); copyStruct("HashMap", "Map", "Object"); copyStruct("Executable", "Object"); + + copyStruct("Exception", "Object"); + copyStruct("ArithmeticException", "Exception", "Object"); + copyStruct("IllegalArgumentException", "Exception", "Object"); + copyStruct("IllegalStateException", "Exception", "Object"); + copyStruct("NumberFormatException", "Exception", "Object"); } private void addDefaultTransforms() { @@ -984,7 +1196,7 @@ class Definition { addTransform(defType, longobjType, "Def", 
"DefToLong", true); addTransform(defType, floatobjType, "Def", "DefToFloat", true); addTransform(defType, doubleobjType, "Def", "DefToDouble", true); - + addTransform(numberType, booleanType, "Utility", "NumberToboolean", true); addTransform(numberType, byteType, "Number", "byteValue", false); addTransform(numberType, shortType, "Number", "shortValue", false); @@ -1162,41 +1374,155 @@ class Definition { addBound(stringType, charseqType, charseqType); + addBound(oitrType, itrType, itrType); + addBound(oitrType, sitrType, itrType); + addBound(sitrType, itrType, itrType); + + addBound(ocollectionType, collectionType, collectionType); + addBound(scollectionType, collectionType, collectionType); + addBound(scollectionType, ocollectionType, ocollectionType); + addBound(listType, collectionType, collectionType); + addBound(listType, ocollectionType, collectionType); + addBound(listType, scollectionType, collectionType); + addBound(arraylistType, collectionType, collectionType); + addBound(arraylistType, ocollectionType, collectionType); + addBound(arraylistType, scollectionType, collectionType); addBound(arraylistType, listType, listType); + addBound(olistType, collectionType, collectionType); + addBound(olistType, ocollectionType, ocollectionType); + addBound(olistType, scollectionType, ocollectionType); addBound(olistType, listType, listType); addBound(olistType, arraylistType, listType); + addBound(oarraylistType, collectionType, collectionType); + addBound(oarraylistType, ocollectionType, ocollectionType); + addBound(oarraylistType, scollectionType, ocollectionType); addBound(oarraylistType, listType, listType); - addBound(oarraylistType, olistType, olistType); addBound(oarraylistType, arraylistType, arraylistType); + addBound(oarraylistType, olistType, olistType); + addBound(slistType, collectionType, collectionType); + addBound(slistType, ocollectionType, ocollectionType); + addBound(slistType, scollectionType, scollectionType); + addBound(slistType, listType, 
listType); + addBound(slistType, arraylistType, listType); + addBound(slistType, olistType, olistType); + addBound(slistType, oarraylistType, olistType); + addBound(sarraylistType, collectionType, collectionType); + addBound(sarraylistType, ocollectionType, ocollectionType); + addBound(sarraylistType, scollectionType, scollectionType); + addBound(sarraylistType, listType, listType); + addBound(sarraylistType, arraylistType, arraylistType); + addBound(sarraylistType, olistType, olistType); + addBound(sarraylistType, oarraylistType, oarraylistType); + addBound(sarraylistType, slistType, slistType); + addBound(setType, collectionType, collectionType); + addBound(setType, ocollectionType, collectionType); + addBound(setType, scollectionType, collectionType); + addBound(setType, listType, collectionType); + addBound(setType, arraylistType, collectionType); + addBound(setType, olistType, collectionType); + addBound(setType, oarraylistType, collectionType); + addBound(setType, slistType, collectionType); + addBound(setType, sarraylistType, collectionType); + addBound(hashsetType, collectionType, collectionType); + addBound(hashsetType, ocollectionType, collectionType); + addBound(hashsetType, scollectionType, collectionType); + addBound(hashsetType, listType, collectionType); + addBound(hashsetType, arraylistType, collectionType); + addBound(hashsetType, olistType, collectionType); + addBound(hashsetType, oarraylistType, collectionType); + addBound(hashsetType, slistType, collectionType); + addBound(hashsetType, sarraylistType, collectionType); + addBound(hashsetType, setType, setType); + addBound(osetType, collectionType, collectionType); + addBound(osetType, ocollectionType, ocollectionType); + addBound(osetType, scollectionType, ocollectionType); + addBound(osetType, listType, collectionType); + addBound(osetType, arraylistType, collectionType); + addBound(osetType, olistType, ocollectionType); + addBound(osetType, oarraylistType, ocollectionType); + addBound(osetType, 
slistType, ocollectionType); + addBound(osetType, sarraylistType, ocollectionType); + addBound(osetType, setType, setType); + addBound(osetType, hashsetType, setType); + addBound(ohashsetType, collectionType, collectionType); + addBound(ohashsetType, ocollectionType, ocollectionType); + addBound(ohashsetType, scollectionType, ocollectionType); + addBound(ohashsetType, listType, collectionType); + addBound(ohashsetType, arraylistType, collectionType); + addBound(ohashsetType, olistType, ocollectionType); + addBound(ohashsetType, oarraylistType, ocollectionType); + addBound(ohashsetType, slistType, ocollectionType); + addBound(ohashsetType, sarraylistType, ocollectionType); + addBound(ohashsetType, setType, setType); + addBound(ohashsetType, hashsetType, hashsetType); + addBound(ohashsetType, osetType, osetType); + addBound(ssetType, collectionType, collectionType); + addBound(ssetType, ocollectionType, ocollectionType); + addBound(ssetType, scollectionType, scollectionType); + addBound(ssetType, listType, collectionType); + addBound(ssetType, arraylistType, collectionType); + addBound(ssetType, olistType, ocollectionType); + addBound(ssetType, oarraylistType, ocollectionType); + addBound(ssetType, slistType, scollectionType); + addBound(ssetType, sarraylistType, scollectionType); + addBound(ssetType, setType, setType); + addBound(ssetType, hashsetType, setType); + addBound(ssetType, osetType, osetType); + addBound(ssetType, ohashsetType, osetType); + addBound(shashsetType, collectionType, collectionType); + addBound(shashsetType, ocollectionType, ocollectionType); + addBound(shashsetType, scollectionType, scollectionType); + addBound(shashsetType, listType, collectionType); + addBound(shashsetType, arraylistType, collectionType); + addBound(shashsetType, olistType, ocollectionType); + addBound(shashsetType, oarraylistType, ocollectionType); + addBound(shashsetType, slistType, scollectionType); + addBound(shashsetType, sarraylistType, scollectionType); + 
addBound(shashsetType, setType, setType); + addBound(shashsetType, hashsetType, hashsetType); + addBound(shashsetType, osetType, osetType); + addBound(shashsetType, ohashsetType, hashsetType); + addBound(shashsetType, ssetType, ssetType); addBound(hashmapType, mapType, mapType); - addBound(omapType, mapType, mapType); - addBound(omapType, hashmapType, mapType); - addBound(ohashmapType, mapType, mapType); - addBound(ohashmapType, hashmapType, hashmapType); - addBound(ohashmapType, omapType, omapType); + addBound(oomapType, mapType, mapType); + addBound(oomapType, hashmapType, mapType); + addBound(oohashmapType, mapType, mapType); + addBound(oohashmapType, hashmapType, hashmapType); + addBound(oohashmapType, oomapType, oomapType); addBound(smapType, mapType, mapType); addBound(smapType, hashmapType, mapType); - addBound(smapType, omapType, omapType); - addBound(smapType, ohashmapType, omapType); + addBound(smapType, oomapType, oomapType); + addBound(smapType, oohashmapType, oomapType); addBound(shashmapType, mapType, mapType); addBound(shashmapType, hashmapType, hashmapType); - addBound(shashmapType, omapType, omapType); - addBound(shashmapType, ohashmapType, ohashmapType); + addBound(shashmapType, oomapType, oomapType); + addBound(shashmapType, oohashmapType, oohashmapType); addBound(shashmapType, smapType, smapType); addBound(somapType, mapType, mapType); addBound(somapType, hashmapType, mapType); - addBound(somapType, omapType, omapType); - addBound(somapType, ohashmapType, omapType); + addBound(somapType, oomapType, oomapType); + addBound(somapType, oohashmapType, oomapType); addBound(somapType, smapType, smapType); addBound(somapType, shashmapType, smapType); addBound(sohashmapType, mapType, mapType); addBound(sohashmapType, hashmapType, hashmapType); - addBound(sohashmapType, omapType, omapType); - addBound(sohashmapType, ohashmapType, ohashmapType); + addBound(sohashmapType, oomapType, oomapType); + addBound(sohashmapType, oohashmapType, oohashmapType); 
addBound(sohashmapType, smapType, smapType); addBound(sohashmapType, shashmapType, shashmapType); addBound(sohashmapType, somapType, somapType); + + addBound(arithexcepType, exceptionType, exceptionType); + addBound(iargexcepType, exceptionType, exceptionType); + addBound(istateexceptType, exceptionType, exceptionType); + addBound(nfexcepType, exceptionType, exceptionType); + addBound(arithexcepType, iargexcepType, exceptionType); + addBound(arithexcepType, istateexceptType, exceptionType); + addBound(arithexcepType, nfexcepType, exceptionType); + addBound(iargexcepType, istateexceptType, exceptionType); + addBound(iargexcepType, nfexcepType, exceptionType); + addBound(istateexceptType, nfexcepType, exceptionType); } public final void addStruct(final String name, final Class clazz) { @@ -1232,30 +1558,30 @@ class Definition { if (owner == null) { throw new IllegalArgumentException( - "Owner struct [" + struct + "] not defined for constructor [" + name + "]."); + "Owner struct [" + struct + "] not defined for constructor [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { throw new IllegalArgumentException( - "Invalid constructor name [" + name + "] with the struct [" + owner.name + "]."); + "Invalid constructor name [" + name + "] with the struct [" + owner.name + "]."); } if (owner.constructors.containsKey(name)) { throw new IllegalArgumentException( - "Duplicate constructor name [" + name + "] found within the struct [" + owner.name + "]."); + "Duplicate constructor name [" + name + "] found within the struct [" + owner.name + "]."); } if (owner.statics.containsKey(name)) { throw new IllegalArgumentException("Constructors and functions may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } if (owner.methods.containsKey(name)) { throw new IllegalArgumentException("Constructors and methods may not have the same name" + - " [" + name + "] 
within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } - final Class[] classes = new Class[args.length]; + final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { if (genargs != null) { @@ -1263,8 +1589,8 @@ class Definition { genargs[count].clazz.asSubclass(args[count].clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic argument [" + genargs[count].name + "]" + - " is not a sub class of [" + args[count].name + "] in the constructor" + - " [" + name + " ] from the struct [" + owner.name + "]."); + " is not a sub class of [" + args[count].name + "] in the constructor" + + " [" + name + " ] from the struct [" + owner.name + "]."); } } @@ -1277,12 +1603,12 @@ class Definition { reflect = owner.clazz.getConstructor(classes); } catch (NoSuchMethodException exception) { throw new IllegalArgumentException("Constructor [" + name + "] not found for class" + - " [" + owner.clazz.getName() + "] with arguments " + Arrays.toString(classes) + "."); + " [" + owner.clazz.getName() + "] with arguments " + Arrays.toString(classes) + "."); } final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect); final Constructor constructor = - new Constructor(name, owner, Arrays.asList(genargs != null ? genargs : args), asm, reflect); + new Constructor(name, owner, Arrays.asList(genargs != null ? genargs : args), asm, reflect); owner.constructors.put(name, constructor); } @@ -1293,37 +1619,37 @@ class Definition { if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined" + - " for " + (statik ? "function" : "method") + " [" + name + "]."); + " for " + (statik ? "function" : "method") + " [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { throw new IllegalArgumentException("Invalid " + (statik ? 
"function" : "method") + - " name [" + name + "] with the struct [" + owner.name + "]."); + " name [" + name + "] with the struct [" + owner.name + "]."); } if (owner.constructors.containsKey(name)) { throw new IllegalArgumentException("Constructors and " + (statik ? "functions" : "methods") + - " may not have the same name [" + name + "] within the same struct" + - " [" + owner.name + "]."); + " may not have the same name [" + name + "] within the same struct" + + " [" + owner.name + "]."); } if (owner.statics.containsKey(name)) { if (statik) { throw new IllegalArgumentException( - "Duplicate function name [" + name + "] found within the struct [" + owner.name + "]."); + "Duplicate function name [" + name + "] found within the struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Functions and methods may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } } if (owner.methods.containsKey(name)) { if (statik) { throw new IllegalArgumentException("Functions and methods may not have the same name" + - " [" + name + "] within the same struct [" + owner.name + "]."); + " [" + name + "] within the same struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Duplicate method name [" + name + "]" + - " found within the struct [" + owner.name + "]."); + " found within the struct [" + owner.name + "]."); } } @@ -1332,18 +1658,18 @@ class Definition { genrtn.clazz.asSubclass(rtn.clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic return [" + genrtn.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + - " [" + name + " ] from the struct [" + owner.name + "]."); + " is not a sub class of [" + rtn.clazz.getCanonicalName() + "] in the method" + + " [" + name + " ] from the struct [" + owner.name + "]."); } } if (genargs != null && 
genargs.length != args.length) { throw new IllegalArgumentException("Generic arguments arity [" + genargs.length + "] is not the same as " + - (statik ? "function" : "method") + " [" + name + "] arguments arity" + - " [" + args.length + "] within the struct [" + owner.name + "]."); + (statik ? "function" : "method") + " [" + name + "] arguments arity" + + " [" + args.length + "] within the struct [" + owner.name + "]."); } - final Class[] classes = new Class[args.length]; + final Class[] classes = new Class[args.length]; for (int count = 0; count < classes.length; ++count) { if (genargs != null) { @@ -1351,8 +1677,8 @@ class Definition { genargs[count].clazz.asSubclass(args[count].clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic argument [" + genargs[count].name + "] is not a sub class" + - " of [" + args[count].name + "] in the " + (statik ? "function" : "method") + - " [" + name + " ] from the struct [" + owner.name + "]."); + " of [" + args[count].name + "] in the " + (statik ? "function" : "method") + + " [" + name + " ] from the struct [" + owner.name + "]."); } } @@ -1365,15 +1691,15 @@ class Definition { reflect = owner.clazz.getMethod(alias == null ? name : alias, classes); } catch (NoSuchMethodException exception) { throw new IllegalArgumentException((statik ? "Function" : "Method") + - " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + - " with arguments " + Arrays.toString(classes) + "."); + " [" + (alias == null ? name : alias) + "] not found for class [" + owner.clazz.getName() + "]" + + " with arguments " + Arrays.toString(classes) + "."); } if (!reflect.getReturnType().equals(rtn.clazz)) { throw new IllegalArgumentException("Specified return type class [" + rtn.clazz + "]" + - " does not match the found return type class [" + reflect.getReturnType() + "] for the " + - (statik ? 
"function" : "method") + " [" + name + "]" + - " within the struct [" + owner.name + "]."); + " does not match the found return type class [" + reflect.getReturnType() + "] for the " + + (statik ? "function" : "method") + " [" + name + "]" + + " within the struct [" + owner.name + "]."); } final org.objectweb.asm.commons.Method asm = org.objectweb.asm.commons.Method.getMethod(reflect); @@ -1383,32 +1709,32 @@ class Definition { try { if (statik) { handle = MethodHandles.publicLookup().in(owner.clazz).findStatic( - owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); + owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); } else { handle = MethodHandles.publicLookup().in(owner.clazz).findVirtual( - owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); + owner.clazz, alias == null ? name : alias, MethodType.methodType(rtn.clazz, classes)); } } catch (NoSuchMethodException | IllegalAccessException exception) { throw new IllegalArgumentException("Method [" + (alias == null ? name : alias) + "]" + - " not found for class [" + owner.clazz.getName() + "]" + - " with arguments " + Arrays.toString(classes) + "."); + " not found for class [" + owner.clazz.getName() + "]" + + " with arguments " + Arrays.toString(classes) + "."); } final Method method = new Method(name, owner, genrtn != null ? genrtn : rtn, - Arrays.asList(genargs != null ? genargs : args), asm, reflect, handle); + Arrays.asList(genargs != null ? 
genargs : args), asm, reflect, handle); final int modifiers = reflect.getModifiers(); if (statik) { if (!java.lang.reflect.Modifier.isStatic(modifiers)) { throw new IllegalArgumentException("Function [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to a static Java method."); + " within the struct [" + owner.name + "] is not linked to a static Java method."); } owner.functions.put(name, method); } else { if (java.lang.reflect.Modifier.isStatic(modifiers)) { throw new IllegalArgumentException("Method [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to a non-static Java method."); + " within the struct [" + owner.name + "] is not linked to a non-static Java method."); } owner.methods.put(name, method); @@ -1421,31 +1747,31 @@ class Definition { if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for " + - (statik ? "static" : "member") + " [" + name + "]."); + (statik ? "static" : "member") + " [" + name + "]."); } if (!name.matches("^[_a-zA-Z][_a-zA-Z0-9]*$")) { throw new IllegalArgumentException("Invalid " + (statik ? 
"static" : "member") + - " name [" + name + "] with the struct [" + owner.name + "]."); + " name [" + name + "] with the struct [" + owner.name + "]."); } if (owner.statics.containsKey(name)) { if (statik) { throw new IllegalArgumentException("Duplicate static name [" + name + "]" + - " found within the struct [" + owner.name + "]."); + " found within the struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Statics and members may not have the same name " + - "[" + name + "] within the same struct [" + owner.name + "]."); + "[" + name + "] within the same struct [" + owner.name + "]."); } } if (owner.members.containsKey(name)) { if (statik) { throw new IllegalArgumentException("Statics and members may not have the same name " + - "[" + name + "] within the same struct [" + owner.name + "]."); + "[" + name + "] within the same struct [" + owner.name + "]."); } else { throw new IllegalArgumentException("Duplicate member name [" + name + "]" + - " found within the struct [" + owner.name + "]."); + " found within the struct [" + owner.name + "]."); } } @@ -1454,8 +1780,8 @@ class Definition { generic.clazz.asSubclass(type.clazz); } catch (ClassCastException exception) { throw new ClassCastException("Generic type [" + generic.clazz.getCanonicalName() + "]" + - " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" + - " [" + name + " ] from the struct [" + owner.name + "]."); + " is not a sub class of [" + type.clazz.getCanonicalName() + "] for the field" + + " [" + name + " ] from the struct [" + owner.name + "]."); } } @@ -1465,7 +1791,7 @@ class Definition { reflect = owner.clazz.getField(alias == null ? name : alias); } catch (NoSuchFieldException exception) { throw new IllegalArgumentException("Field [" + (alias == null ? 
name : alias) + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } MethodHandle getter = null; @@ -1474,13 +1800,13 @@ class Definition { try { if (!statik) { getter = MethodHandles.publicLookup().in(owner.clazz).findGetter( - owner.clazz, alias == null ? name : alias, type.clazz); + owner.clazz, alias == null ? name : alias, type.clazz); setter = MethodHandles.publicLookup().in(owner.clazz).findSetter( - owner.clazz, alias == null ? name : alias, type.clazz); + owner.clazz, alias == null ? name : alias, type.clazz); } } catch (NoSuchFieldException | IllegalAccessException exception) { throw new IllegalArgumentException("Getter/Setter [" + (alias == null ? name : alias) + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } final Field field = new Field(name, owner, generic == null ? type : generic, type, reflect, getter, setter); @@ -1493,14 +1819,14 @@ class Definition { if (!java.lang.reflect.Modifier.isFinal(modifiers)) { throw new IllegalArgumentException("Static [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to static Java field."); + " within the struct [" + owner.name + "] is not linked to static Java field."); } owner.statics.put(alias == null ? name : alias, field); } else { if (java.lang.reflect.Modifier.isStatic(modifiers)) { throw new IllegalArgumentException("Member [" + name + "]" + - " within the struct [" + owner.name + "] is not linked to non-static Java field."); + " within the struct [" + owner.name + "] is not linked to non-static Java field."); } owner.members.put(alias == null ? 
name : alias, field); @@ -1519,18 +1845,18 @@ class Definition { if (struct == null) { throw new IllegalArgumentException("Child struct [" + children[count] + "]" + - " not defined for copy to owner struct [" + owner.name + "]."); + " not defined for copy to owner struct [" + owner.name + "]."); } try { owner.clazz.asSubclass(child.clazz); } catch (ClassCastException exception) { throw new ClassCastException("Child struct [" + child.name + "]" + - " is not a super type of owner struct [" + owner.name + "] in copy."); + " is not a super type of owner struct [" + owner.name + "] in copy."); } final boolean object = child.clazz.equals(Object.class) && - java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers()); + java.lang.reflect.Modifier.isInterface(owner.clazz.getModifiers()); for (final Method method : child.methods.values()) { if (owner.methods.get(method.name) == null) { @@ -1543,22 +1869,22 @@ class Definition { reflect = clazz.getMethod(method.method.getName(), method.reflect.getParameterTypes()); } catch (NoSuchMethodException exception) { throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + - " class [" + owner.clazz.getName() + "] with arguments " + - Arrays.toString(method.reflect.getParameterTypes()) + "."); + " class [" + owner.clazz.getName() + "] with arguments " + + Arrays.toString(method.reflect.getParameterTypes()) + "."); } try { handle = MethodHandles.publicLookup().in(owner.clazz).findVirtual( - owner.clazz, method.method.getName(), - MethodType.methodType(method.reflect.getReturnType(), method.reflect.getParameterTypes())); + owner.clazz, method.method.getName(), + MethodType.methodType(method.reflect.getReturnType(), method.reflect.getParameterTypes())); } catch (NoSuchMethodException | IllegalAccessException exception) { throw new IllegalArgumentException("Method [" + method.method.getName() + "] not found for" + - " class [" + owner.clazz.getName() + "] with arguments " + - 
Arrays.toString(method.reflect.getParameterTypes()) + "."); + " class [" + owner.clazz.getName() + "] with arguments " + + Arrays.toString(method.reflect.getParameterTypes()) + "."); } owner.methods.put(method.name, - new Method(method.name, owner, method.rtn, method.arguments, method.method, reflect, handle)); + new Method(method.name, owner, method.rtn, method.arguments, method.method, reflect, handle)); } } @@ -1572,21 +1898,21 @@ class Definition { reflect = owner.clazz.getField(field.reflect.getName()); } catch (NoSuchFieldException exception) { throw new IllegalArgumentException("Field [" + field.reflect.getName() + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } try { getter = MethodHandles.publicLookup().in(owner.clazz).findGetter( - owner.clazz, field.name, field.type.clazz); + owner.clazz, field.name, field.type.clazz); setter = MethodHandles.publicLookup().in(owner.clazz).findSetter( - owner.clazz, field.name, field.type.clazz); + owner.clazz, field.name, field.type.clazz); } catch (NoSuchFieldException | IllegalAccessException exception) { throw new IllegalArgumentException("Getter/Setter [" + field.name + "]" + - " not found for class [" + owner.clazz.getName() + "]."); + " not found for class [" + owner.clazz.getName() + "]."); } owner.members.put(field.name, - new Field(field.name, owner, field.type, field.generic, reflect, getter, setter)); + new Field(field.name, owner, field.type, field.generic, reflect, getter, setter)); } } } @@ -1598,19 +1924,19 @@ class Definition { if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for" + - " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "]."); + " transform with cast type from [" + from.name + "] and cast type to [" + to.name + "]."); } if (from.equals(to)) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "] 
cannot" + - " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); + " have cast type from [" + from.name + "] be the same as cast type to [" + to.name + "]."); } final Cast cast = new Cast(from, to); if (transforms.containsKey(cast)) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] already defined."); } Method method; @@ -1622,14 +1948,14 @@ class Definition { if (method == null) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a function [" + name + "] that is not defined."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using a function [" + name + "] that is not defined."); } if (method.arguments.size() != 1) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using function [" + name + "] does not have a single type argument."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using function [" + name + "] does not have a single type argument."); } Type argument = method.arguments.get(0); @@ -1642,8 +1968,8 @@ class Definition { upcast = argument; } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast from type to the function input argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " function [" + name + "] cannot cast from type to the function input argument type."); } } 
@@ -1657,8 +1983,8 @@ class Definition { downcast = to; } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " function [" + name + "] cannot cast to type to the function return argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " function [" + name + "] cannot cast to type to the function return argument type."); } } } else { @@ -1666,14 +1992,14 @@ class Definition { if (method == null) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using a method [" + name + "] that is not defined."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using a method [" + name + "] that is not defined."); } if (!method.arguments.isEmpty()) { throw new IllegalArgumentException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + - "] using method [" + name + "] does not have a single type argument."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + + "] using method [" + name + "] does not have a single type argument."); } try { @@ -1684,8 +2010,8 @@ class Definition { upcast = getType(owner.name); } catch (ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + - " method [" + name + "] cannot cast from type to the method input argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "] using" + + " method [" + name + "] cannot cast from type to the method input argument type."); } } @@ -1699,8 +2025,8 @@ class Definition { downcast = to; } catch 
(ClassCastException cce1) { throw new ClassCastException("Transform with owner struct [" + owner.name + "]" + - " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + - " using method [" + name + "] cannot cast to type to the method return argument type."); + " and cast type from [" + from.name + "] to cast type to [" + to.name + "]" + + " using method [" + name + "] cannot cast to type to the method return argument type."); } } } @@ -1715,12 +2041,12 @@ class Definition { if (bounds.containsKey(pair0)) { throw new IllegalArgumentException( - "Bound already defined for types [" + type0.name + "] and [" + type1.name + "]."); + "Bound already defined for types [" + type0.name + "] and [" + type1.name + "]."); } if (bounds.containsKey(pair1)) { throw new IllegalArgumentException( - "Bound already defined for types [" + type1.name + "] and [" + type0.name + "]."); + "Bound already defined for types [" + type1.name + "] and [" + type0.name + "]."); } bounds.put(pair0, bound); @@ -1763,7 +2089,7 @@ class Definition { clazz = Class.forName(type.getInternalName().replace('/', '.')); } catch (ClassNotFoundException exception) { throw new IllegalArgumentException("The class [" + type.getInternalName() + "]" + - " could not be found to create type [" + name + "]."); + " could not be found to create type [" + name + "]."); } sort = Sort.ARRAY; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java index 4963815f470..768b0aea2ec 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ErrorHandlingLexer.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plan.a; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. 
See the NOTICE file distributed with @@ -19,6 +17,8 @@ package org.elasticsearch.plan.a; * under the License. */ +package org.elasticsearch.plan.a; + import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.misc.Interval; diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java new file mode 100644 index 00000000000..221b951497f --- /dev/null +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Metadata.java @@ -0,0 +1,619 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plan.a; + +import org.antlr.v4.runtime.ParserRuleContext; +import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.plan.a.Definition.Cast; +import org.elasticsearch.plan.a.Definition.Type; +import org.elasticsearch.plan.a.PlanAParser.ExpressionContext; +import org.elasticsearch.plan.a.PlanAParser.PrecedenceContext; + +import java.util.HashMap; +import java.util.Map; + +/** + * Metadata is a wrapper for all the data that is collected by the {@link Analyzer}. 
Each node in the ANTLR parse tree + * will have one of the types of metadata to store information used either in a different node by the analyzer + * or by the {@link Writer} during byte code generation. Metadata also contains several objects passed into the + * {@link Analyzer} and {@link Writer} used during compilation including the {@link Definition}, the source code, + * the root of the ANTLR parse tree, and the {@link CompilerSettings}. + */ +class Metadata { + + /** + * StatementMetadata is used to store metadata mostly about + * control flow for ANTLR nodes related to if/else, do, while, for, etc. + */ + static class StatementMetadata { + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The lastSource variable will be set to true when the final statement from the root ANTLR node is about + * to be visited. This is used to determine whether or not the auto-return feature is allowed to be used, + * and if a null return value needs to be generated automatically since a return value is always required. + */ + boolean lastSource = false; + + /** + * The beginLoop variable will be set to true whenever a loop node is initially visited including inner + * loops. This will not be propagated down the parse tree afterwards, though. This is used to determine + * whether or not inLoop should be set further down the tree. Note that inLoop alone is not enough + * information to determine whether we are in the last statement of a loop because we may inside of + * multiple loops, so this variable is necessary. + */ + boolean beginLoop = false; + + /** + * The inLoop variable is set to true when inside a loop. This will be propagated down the parse tree. This + * is used to determine whether or not continue and break statements are legal. + */ + boolean inLoop = false; + + /** + * The lastLoop variable is set to true when the final statement of a loop is reached. 
This will be + * propagated down the parse tree until another loop is reached and then will not be propagated further for + * the current loop. This is used to determine whether or not a continue statement is superfluous. + */ + boolean lastLoop = false; + + /** + * The methodEscape variable is set to true when a statement would cause the method to potentially exit. This + * includes return, throw, and continuous loop statements. Note that a catch statement may possibly + * reset this to false after a throw statement. This will be propagated up the tree as far as necessary. + * This is used by the {@link Writer} to ensure that superfluous statements aren't unnecessarily written + * into generated bytecode. + */ + boolean methodEscape = false; + + /** + * The loopEscape variable is set to true when a loop is going to be exited. This may be caused by a number of + * different statements including continue, break, return, etc. This will only be propagated as far as the + * loop node. This is used to ensure that in certain case an infinite loop will be caught at + * compile-time rather than run-time. + */ + boolean loopEscape = false; + + /** + * The allLast variable is set whenever a final statement in a block is reached. This includes the end of loop, + * if, else, etc. This will be only propagated to the top of the block statement ANTLR node. + * This is used to ensure that there are no unreachable statements within the script. + */ + boolean allLast = false; + + /** + * The anyContinue will be set to true when a continue statement is visited. This will be propagated to the + * loop node it's within. This is used to ensure that in certain case an infinite loop will be caught at + * compile-time rather than run-time. + */ + boolean anyContinue = false; + + /** + * The anyBreak will be set to true when a break statement is visited. This will be propagated to the + * loop node it's within. 
This is used to in conjunction with methodEscape to ensure there are no unreachable + * statements within the script. + */ + boolean anyBreak = false; + + /** + * The count variable is used as a rudimentary count of statements within a loop. This will be used in + * the {@link Writer} to keep a count of statements that have been executed at run-time to ensure that a loop + * will exit if it runs too long. + */ + int count = 0; + + /** + * The exception variable is used to store the exception type when a throw node is visited. This is used by + * the {@link Writer} to write the correct type of exception in the generated byte code. + */ + Type exception = null; + + /** + * The slot variable is used to store the place on the stack of where a thrown exception will be stored to. + * This is used by the {@link Writer}. + */ + int slot = -1; + + /** + * Constructor. + * @param source The associated ANTLR node. + */ + private StatementMetadata(final ParserRuleContext source) { + this.source = source; + } + } + + /** + * ExpressionMetadata is used to store metadata mostly about constants and casting + * for ANTLR nodes related to mathematical operations. + */ + static class ExpressionMetadata { + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The read variable is used to determine whether or not the value of an expression will be read from. + * This is set to false when the expression is the left-hand side of an assignment that is not chained or + * when a method call is made alone. This will propagate down the tree as far as necessary. + * The {@link Writer} uses this to determine when a value may need to be popped from the stack + * such as when a method call returns a value that is never read. + */ + boolean read = true; + + /** + * The statement variable is set true when an expression is a complete meaning that there is some sort + * of effect on a variable or a method call is made. 
This will propagate up the tree as far as necessary. + * This prevents statements that have no effect on the output of a script from being executed. + */ + boolean statement = false; + + /** + * The preConst variable is set to a non-null value when a constant statement is made in a script. This is + * used to track the constant value prior to any casts being made on an ANTLR node. + */ + Object preConst = null; + + /** + * The postConst variable is set to a non-null value when a cast is made on a node where a preConst variable + * has already been set when the cast would leave the constant as a non-object value except in the case of a + * String. This will be propagated up the tree and used to simplify constants when possible such as making + * the value of 2*2 be 4 in the * node, so that the {@link Writer} only has to push a 4 onto the stack. + */ + Object postConst = null; + + /** + * The isNull variable is set to true when a null constant statement is made in the script. This allows the + * {@link Writer} to potentially shortcut certain comparison operations. + */ + boolean isNull = false; + + /** + * The to variable is used to track what an ANTLR node's value should be cast to. This is set on every ANTLR + * node in the tree, and used by the {@link Writer} to make a run-time cast if necessary in the byte code. + * This is also used by the {@link Analyzer} to determine if a cast is legal. + */ + Type to = null; + + /** + * The from variable is used to track what an ANTLR node's value should be cast from. This is set on every + * ANTLR node in the tree independent of other nodes. This is used by the {@link Analyzer} to determine if a + * cast is legal. + */ + Type from = null; + + /** + * The explicit variable is set to true when a cast is explicitly made in the script. This tracks whether + * or not a cast is a legal up cast. + */ + boolean explicit = false; + + /** + * The typesafe variable is set to true when a dynamic type is used as part of an expression. 
This propagates + * up the tree to the top of the expression. This allows for implicit up casts throughout the expression and + * is used by the {@link Analyzer}. + */ + boolean typesafe = true; + + /** + * This is set to the combination of the to and from variables at the end of each node visit in the + * {@link Analyzer}. This is set on every ANTLR node in the tree independent of other nodes, and is + * used by {@link Writer} to make a run-time cast if necessary in the byte code. + */ + Cast cast = null; + + /** + * Constructor. + * @param source The associated ANTLR node. + */ + private ExpressionMetadata(final ParserRuleContext source) { + this.source = source; + } + } + + /** + * ExternalMetadata is used to store metadata about the overall state of a variable/method chain such as + * '(int)x.get(3)' where each piece of that chain is broken into it's indiviual pieces and stored in + * {@link ExtNodeMetadata}. + */ + static class ExternalMetadata { + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The read variable is set to true when the value of a variable/method chain is going to be read from. + * This is used by the {@link Analyzer} to determine if this variable/method chain will be in a standalone + * statement. + */ + boolean read = false; + + /** + * The storeExpr variable is set to the right-hand side of an assignment in the variable/method chain if + * necessary. This is used by the {@link Analyzer} to set the proper metadata for a read versus a write, + * and is used by the {@link Writer} to determine if a bytecode operation should be a load or a store. + */ + ParserRuleContext storeExpr = null; + + /** + * The token variable is set to a constant value of the operator type (+, -, etc.) when a compound assignment + * is being visited. This is also used by the increment and decrement operators. 
This is used by both the + * {@link Analyzer} and {@link Writer} to correctly handle the compound assignment. + */ + int token = 0; + + /** + * The pre variable is set to true when pre-increment or pre-decrement is visited. This is used by both the + * {@link Analyzer} and {@link Writer} to correctly handle any reads of the variable/method chain that are + * necessary. + */ + boolean pre = false; + + /** + * The post variable is set to true when post-increment or post-decrement is visited. This is used by both the + * {@link Analyzer} and {@link Writer} to correctly handle any reads of the variable/method chain that are + * necessary. + */ + boolean post = false; + + /** + * The scope variable is incremented and decremented when a precedence node is visited as part of a + * variable/method chain. This is used by the {@link Analyzer} to determine when the final piece of the + * variable/method chain has been reached. + */ + int scope = 0; + + /** + * The current variable is set to whatever the current type is within the visited node of the variable/method + * chain. This changes as the nodes for the variable/method are walked through. This is used by the + * {@link Analyzer} to make decisions about whether or not a cast is legal, and what methods are available + * for that specific type. + */ + Type current = null; + + /** + * The statik variable is set to true when a variable/method chain begins with static type. This is used by + * the {@link Analyzer} to determine what methods/members are available for that specific type. + */ + boolean statik = false; + + /** + * The statement variable is set to true when a variable/method chain can be standalone statement. This is + * used by the {@link Analyzer} to error out if there a variable/method chain that is not a statement. + */ + boolean statement = false; + + /** + * The constant variable is set when a String constant is part of the variable/method chain. 
String is a + * special case because methods/members need to be able to be called on a String constant, so this can't be + * only as part of {@link ExpressionMetadata}. This is used by the {@link Writer} to write out the String + * constant in the byte code. + */ + Object constant = null; + + /** + * Constructor. + * @param source The associated ANTLR node. + */ + private ExternalMetadata(final ParserRuleContext source) { + this.source = source; + } + } + + static class ExtNodeMetadata { + /** + * The parent variable is top-level ANTLR node of the variable/method chain. This is used to retrieve the + * ExternalMetadata for the variable/method chain this ExtNodeMetadata is a piece of. + */ + final ParserRuleContext parent; + + /** + * The source variable is the ANTLR node used to generate this metadata. + */ + final ParserRuleContext source; + + /** + * The target variable is set to a value based on the type of ANTLR node that is visited. This is used by + * {@link Writer} to determine whether a cast, store, load, or method call should be written in byte code + * depending on what the target variable is. + */ + Object target = null; + + /** + * The last variable is set to true when the last ANTLR node of the variable/method chain is visted. This is + * used by the {@link Writer} in conjuction with the storeExpr variable to determine whether or not a store + * needs to be written as opposed to a load. + */ + boolean last = false; + + /** + * The type variable is set to the type that a visited node ends with. This is used by both the + * {@link Analyzer} and {@link Writer} to make decisions about compound assignments, String constants, and + * shortcuts. + */ + Type type = null; + + /** + * The promote variable is set to the type of a promotion within a compound assignment. Compound assignments + * may require promotion between the left-hand side variable and right-hand side value. 
This is used by the + * {@link Writer} to make the correct decision about the byte code operation. + */ + Type promote = null; + + /** + * The castFrom variable is set during a compound assignment. This is used by the {@link Writer} to + * cast the values to the promoted type during a compound assignment. + */ + Cast castFrom = null; + + /** + * The castTo variable is set during an explicit cast in a variable/method chain or during a compound + * assignment. This is used by the {@link Writer} to either do an explicit cast, or cast the values + * from the promoted type back to the original type during a compound assignment. + */ + Cast castTo = null; + + /** + * Constructor. + * @param parent The top-level ANTLR node for the variable/method chain. + * @param source The associated ANTLR node. + */ + private ExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { + this.parent = parent; + this.source = source; + } + } + + /** + * A utility method to output consistent error messages. + * @param ctx The ANTLR node the error occurred in. + * @return The error message with tacked on line number and character position. + */ + static String error(final ParserRuleContext ctx) { + return "Error [" + ctx.getStart().getLine() + ":" + ctx.getStart().getCharPositionInLine() + "]: "; + } + + /** + * Acts as both the Plan A API and white-list for what types and methods are allowed. + */ + final Definition definition; + + /** + * The original text of the input script. This is used to write out the source code into + * the byte code file for debugging purposes. + */ + final String source; + + /** + * Toot node of the ANTLR tree for the Plan A script. + */ + final ParserRuleContext root; + + /** + * Used to determine certain compile-time constraints such as whether or not numeric overflow is allowed + * and how many statements are allowed before a loop will throw an exception. 
+ */ + final CompilerSettings settings; + + /** + * Used to determine what slot the input variable is stored in. This is used in the {@link Writer} whenever + * the input variable is accessed. + */ + int inputValueSlot = -1; + + /** + * Used to determine what slot the score variable is stored in. This is used in the {@link Writer} whenever + * the score variable is accessed. + */ + int scoreValueSlot = -1; + + /** + * Used to determine what slot the loopCounter variable is stored in. This is used in the {@link Writer} whenever + * the loop variable is accessed. + */ + int loopCounterSlot = -1; + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map statementMetadata = new HashMap<>(); + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map expressionMetadata = new HashMap<>(); + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map externalMetadata = new HashMap<>(); + + /** + * Maps the relevant ANTLR node to its metadata. + */ + private final Map extNodeMetadata = new HashMap<>(); + + /** + * Constructor. + * @param definition The Plan A definition. + * @param source The source text for the script. + * @param root The root ANTLR node. + * @param settings The compile-time settings. + */ + Metadata(final Definition definition, final String source, final ParserRuleContext root, final CompilerSettings settings) { + this.definition = definition; + this.source = source; + this.root = root; + this.settings = settings; + } + + /** + * Creates a new StatementMetadata and stores it in the statementMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new StatementMetadata. + */ + StatementMetadata createStatementMetadata(final ParserRuleContext source) { + final StatementMetadata sourcesmd = new StatementMetadata(source); + statementMetadata.put(source, sourcesmd); + + return sourcesmd; + } + + /** + * Retrieves StatementMetadata from the statementMetadata map. 
+ * @param source The ANTLR node for this metadata. + * @return The retrieved StatementMetadata. + */ + StatementMetadata getStatementMetadata(final ParserRuleContext source) { + final StatementMetadata sourcesmd = statementMetadata.get(source); + + if (sourcesmd == null) { + throw new IllegalStateException(error(source) + "Statement metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourcesmd; + } + + /** + * The ANTLR parse tree is modified in one single case; a parent node needs to check a child node to see if it's + * a precedence node, and if so, it must be removed from the tree permanently. Once the ANTLR tree is built, + * precedence nodes are no longer necessary to maintain the correct ordering of the tree, so they only + * add a level of indirection where complicated decisions about metadata passing would have to be made. This + * method removes the need for those decisions. + * @param source The child ANTLR node to check for precedence. + * @return The updated child ANTLR node. + */ + ExpressionContext updateExpressionTree(ExpressionContext source) { + // Check to see if the ANTLR node is a precedence node. + if (source instanceof PrecedenceContext) { + final ParserRuleContext parent = source.getParent(); + int index = 0; + + // Mark the index of the source node within the list of child nodes from the parent. + for (final ParseTree child : parent.children) { + if (child == source) { + break; + } + + ++index; + } + + // If there are multiple precedence nodes in a row, remove them all. + while (source instanceof PrecedenceContext) { + source = ((PrecedenceContext)source).expression(); + } + + // Update the parent node with the child of the precedence node. + parent.children.set(index, source); + } + + return source; + } + + /** + * Creates a new ExpressionMetadata and stores it in the expressionMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new ExpressionMetadata. 
+ */ + ExpressionMetadata createExpressionMetadata(ParserRuleContext source) { + final ExpressionMetadata sourceemd = new ExpressionMetadata(source); + expressionMetadata.put(source, sourceemd); + + return sourceemd; + } + + /** + * Retrieves ExpressionMetadata from the expressionMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved ExpressionMetadata. + */ + ExpressionMetadata getExpressionMetadata(final ParserRuleContext source) { + final ExpressionMetadata sourceemd = expressionMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "Expression metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } + + /** + * Creates a new ExternalMetadata and stores it in the externalMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new ExternalMetadata. + */ + ExternalMetadata createExternalMetadata(final ParserRuleContext source) { + final ExternalMetadata sourceemd = new ExternalMetadata(source); + externalMetadata.put(source, sourceemd); + + return sourceemd; + } + + /** + * Retrieves ExternalMetadata from the externalMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved ExternalMetadata. + */ + ExternalMetadata getExternalMetadata(final ParserRuleContext source) { + final ExternalMetadata sourceemd = externalMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "External metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } + + /** + * Creates a new ExtNodeMetadata and stores it in the extNodeMetadata map. + * @param source The ANTLR node for this metadata. + * @return The new ExtNodeMetadata. 
+ */ + ExtNodeMetadata createExtNodeMetadata(final ParserRuleContext parent, final ParserRuleContext source) { + final ExtNodeMetadata sourceemd = new ExtNodeMetadata(parent, source); + extNodeMetadata.put(source, sourceemd); + + return sourceemd; + } + + /** + * Retrieves ExtNodeMetadata from the extNodeMetadata map. + * @param source The ANTLR node for this metadata. + * @return The retrieved ExtNodeMetadata. + */ + ExtNodeMetadata getExtNodeMetadata(final ParserRuleContext source) { + final ExtNodeMetadata sourceemd = extNodeMetadata.get(source); + + if (sourceemd == null) { + throw new IllegalStateException(error(source) + "External metadata does not exist at" + + " the parse node with text [" + source.getText() + "]."); + } + + return sourceemd; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentTypeListener.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java similarity index 55% rename from core/src/main/java/org/elasticsearch/index/mapper/DocumentTypeListener.java rename to plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java index ceb79df17f7..dea3c2021a8 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentTypeListener.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAError.java @@ -17,17 +17,21 @@ * under the License. */ -package org.elasticsearch.index.mapper; +package org.elasticsearch.plan.a; /** + * The PlanAError class is used to throw internal errors caused by Plan A scripts that cannot be + * caught using a standard {@link Exception}. This prevents the user from catching this specific error + * (as Exceptions are available in the Plan A API, but Errors are not,) and possibly continuing to do + * something hazardous. The alternative was extending {@link Throwable}, but that seemed worse than using + * an {@link Error} in this case. 
*/ -public interface DocumentTypeListener { - +public class PlanAError extends Error { /** - * Invoked just before a new document type has been created. - * - * @param mapper The new document mapper of the type being added + * Constructor. + * @param message The error message. */ - void beforeCreate(DocumentMapper mapper); - + public PlanAError(final String message) { + super(message); + } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java index da9943385c0..e7de571ef2e 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParser.java @@ -40,17 +40,17 @@ class PlanAParser extends Parser { STRING=68, CHAR=69, TRUE=70, FALSE=71, NULL=72, TYPE=73, ID=74, EXTINTEGER=75, EXTID=76; public static final int - RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_initializer = 4, - RULE_afterthought = 5, RULE_declaration = 6, RULE_decltype = 7, RULE_declvar = 8, - RULE_expression = 9, RULE_extstart = 10, RULE_extprec = 11, RULE_extcast = 12, - RULE_extbrace = 13, RULE_extdot = 14, RULE_exttype = 15, RULE_extcall = 16, - RULE_extvar = 17, RULE_extfield = 18, RULE_extnew = 19, RULE_extstring = 20, - RULE_arguments = 21, RULE_increment = 22; + RULE_source = 0, RULE_statement = 1, RULE_block = 2, RULE_empty = 3, RULE_emptyscope = 4, + RULE_initializer = 5, RULE_afterthought = 6, RULE_declaration = 7, RULE_decltype = 8, + RULE_declvar = 9, RULE_trap = 10, RULE_expression = 11, RULE_extstart = 12, + RULE_extprec = 13, RULE_extcast = 14, RULE_extbrace = 15, RULE_extdot = 16, + RULE_exttype = 17, RULE_extcall = 18, RULE_extvar = 19, RULE_extfield = 20, + RULE_extnew = 21, RULE_extstring = 22, RULE_arguments = 23, RULE_increment = 24; public static final String[] ruleNames = { - "source", "statement", "block", "empty", "initializer", 
"afterthought", - "declaration", "decltype", "declvar", "expression", "extstart", "extprec", - "extcast", "extbrace", "extdot", "exttype", "extcall", "extvar", "extfield", - "extnew", "extstring", "arguments", "increment" + "source", "statement", "block", "empty", "emptyscope", "initializer", + "afterthought", "declaration", "decltype", "declvar", "trap", "expression", + "extstart", "extprec", "extcast", "extbrace", "extdot", "exttype", "extcall", + "extvar", "extfield", "extnew", "extstring", "arguments", "increment" }; private static final String[] _LITERAL_NAMES = { @@ -149,21 +149,21 @@ class PlanAParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(47); + setState(51); _errHandler.sync(this); _la = _input.LA(1); do { { { - setState(46); + setState(50); statement(); } } - setState(49); + setState(53); _errHandler.sync(this); _la = _input.LA(1); } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); - setState(51); + setState(55); match(EOF); } } @@ -266,31 +266,14 @@ class PlanAParser extends Parser { } public static class TryContext extends StatementContext { public TerminalNode TRY() { return getToken(PlanAParser.TRY, 0); } - public List block() { - return getRuleContexts(BlockContext.class); + public BlockContext block() { + return getRuleContext(BlockContext.class,0); } - public BlockContext block(int i) { - return getRuleContext(BlockContext.class,i); + public List trap() { 
+ return getRuleContexts(TrapContext.class); } - public List CATCH() { return getTokens(PlanAParser.CATCH); } - public TerminalNode CATCH(int i) { - return getToken(PlanAParser.CATCH, i); - } - public List LP() { return getTokens(PlanAParser.LP); } - public TerminalNode LP(int i) { - return getToken(PlanAParser.LP, i); - } - public List RP() { return getTokens(PlanAParser.RP); } - public TerminalNode RP(int i) { - return getToken(PlanAParser.RP, i); - } - public List TYPE() { return getTokens(PlanAParser.TYPE); } - public TerminalNode TYPE(int i) { - return getToken(PlanAParser.TYPE, i); - } - public List ID() { return getTokens(PlanAParser.ID); } - public TerminalNode ID(int i) { - return getToken(PlanAParser.ID, i); + public TrapContext trap(int i) { + return getRuleContext(TrapContext.class,i); } public TryContext(StatementContext ctx) { copyFrom(ctx); } @Override @@ -391,29 +374,29 @@ class PlanAParser extends Parser { int _la; try { int _alt; - setState(136); + setState(134); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: _localctx = new IfContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(53); - match(IF); - setState(54); - match(LP); - setState(55); - expression(0); - setState(56); - match(RP); setState(57); - block(); + match(IF); + setState(58); + match(LP); + setState(59); + expression(0); setState(60); + match(RP); + setState(61); + block(); + setState(64); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: { - setState(58); + setState(62); match(ELSE); - setState(59); + setState(63); block(); } break; @@ -424,58 +407,28 @@ class PlanAParser extends Parser { _localctx = new WhileContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(62); + setState(66); match(WHILE); - setState(63); + setState(67); match(LP); - setState(64); - expression(0); - setState(65); - match(RP); setState(68); - switch (_input.LA(1)) { - case LBRACK: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case 
CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case CHAR: - case TRUE: - case FALSE: - case NULL: - case TYPE: - case ID: + expression(0); + setState(69); + match(RP); + setState(72); + switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { + case 1: { - setState(66); + setState(70); block(); } break; - case SEMICOLON: + case 2: { - setState(67); + setState(71); empty(); } break; - default: - throw new NoViableAltException(this); } } break; @@ -483,23 +436,23 @@ class PlanAParser extends Parser { _localctx = new DoContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(70); - match(DO); - setState(71); - block(); - setState(72); - match(WHILE); - setState(73); - match(LP); setState(74); - expression(0); + match(DO); setState(75); - match(RP); + block(); + setState(76); + match(WHILE); setState(77); + match(LP); + setState(78); + expression(0); + setState(79); + match(RP); + setState(81); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(76); + setState(80); match(SEMICOLON); } } @@ -510,27 +463,16 @@ class PlanAParser extends Parser { _localctx = new ForContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(79); + setState(83); match(FOR); - setState(80); - match(LP); - setState(82); - _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { - { - setState(81); - initializer(); - } - } - setState(84); - 
match(SEMICOLON); + match(LP); setState(86); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { setState(85); - expression(0); + initializer(); } } @@ -541,56 +483,37 @@ class PlanAParser extends Parser { if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { setState(89); - afterthought(); + expression(0); } } setState(92); - match(RP); - setState(95); - switch (_input.LA(1)) { - case LBRACK: - case LP: - case IF: - case WHILE: - case DO: - case FOR: - case CONTINUE: - case BREAK: - case RETURN: - case NEW: - case TRY: - case THROW: - case BOOLNOT: - case BWNOT: - case ADD: - case SUB: - case INCR: - case DECR: - case OCTAL: - case HEX: - case INTEGER: - case DECIMAL: - case STRING: - case CHAR: - case TRUE: - case FALSE: - case NULL: - case TYPE: - case ID: + match(SEMICOLON); + setState(94); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << 
(INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { setState(93); + afterthought(); + } + } + + setState(96); + match(RP); + setState(99); + switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { + case 1: + { + setState(97); block(); } break; - case SEMICOLON: + case 2: { - setState(94); + setState(98); empty(); } break; - default: - throw new NoViableAltException(this); } } break; @@ -598,25 +521,8 @@ class PlanAParser extends Parser { _localctx = new DeclContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(97); - declaration(); - setState(99); - _la = _input.LA(1); - if (_la==SEMICOLON) { - { - setState(98); - match(SEMICOLON); - } - } - - } - break; - case 6: - _localctx = new ContinueContext(_localctx); - enterOuterAlt(_localctx, 6); - { setState(101); - match(CONTINUE); + declaration(); setState(103); _la = _input.LA(1); if (_la==SEMICOLON) { @@ -628,12 +534,12 @@ class PlanAParser extends Parser { } break; - case 7: - _localctx = new BreakContext(_localctx); - enterOuterAlt(_localctx, 7); + case 6: + _localctx = new ContinueContext(_localctx); + enterOuterAlt(_localctx, 6); { setState(105); - match(BREAK); + match(CONTINUE); setState(107); _la = _input.LA(1); if (_la==SEMICOLON) { @@ -643,21 +549,38 @@ class PlanAParser extends Parser { } } + } + break; + case 7: + _localctx = new BreakContext(_localctx); + enterOuterAlt(_localctx, 7); + { + setState(109); + match(BREAK); + setState(111); + _la = _input.LA(1); + if (_la==SEMICOLON) { + { + setState(110); + match(SEMICOLON); + } + } + } break; case 8: _localctx = new ReturnContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(109); + setState(113); match(RETURN); - setState(110); + setState(114); expression(0); - setState(112); + setState(116); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(111); + setState(115); 
match(SEMICOLON); } } @@ -668,11 +591,11 @@ class PlanAParser extends Parser { _localctx = new TryContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(114); + setState(118); match(TRY); - setState(115); + setState(119); block(); - setState(123); + setState(121); _errHandler.sync(this); _alt = 1; do { @@ -680,27 +603,15 @@ class PlanAParser extends Parser { case 1: { { - setState(116); - match(CATCH); - setState(117); - match(LP); - { - setState(118); - match(TYPE); - setState(119); - match(ID); - } - setState(121); - match(RP); - setState(122); - block(); + setState(120); + trap(); } } break; default: throw new NoViableAltException(this); } - setState(125); + setState(123); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,12,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); @@ -710,15 +621,15 @@ class PlanAParser extends Parser { _localctx = new ThrowContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(127); + setState(125); match(THROW); - setState(128); + setState(126); expression(0); - setState(130); + setState(128); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(129); + setState(127); match(SEMICOLON); } } @@ -729,13 +640,13 @@ class PlanAParser extends Parser { _localctx = new ExprContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(132); + setState(130); expression(0); - setState(134); + setState(132); _la = _input.LA(1); if (_la==SEMICOLON) { { - setState(133); + setState(131); match(SEMICOLON); } } @@ -799,29 +710,29 @@ class PlanAParser extends Parser { enterRule(_localctx, 4, RULE_block); int _la; try { - setState(147); + setState(145); switch (_input.LA(1)) { case LBRACK: _localctx = new MultipleContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(138); + setState(136); match(LBRACK); - setState(142); + setState(138); _errHandler.sync(this); _la = _input.LA(1); - while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | 
(1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { + do { { { - setState(139); + setState(137); statement(); } } - setState(144); + setState(140); _errHandler.sync(this); _la = _input.LA(1); - } - setState(145); + } while ( (((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << IF) | (1L << WHILE) | (1L << DO) | (1L << FOR) | (1L << CONTINUE) | (1L << BREAK) | (1L << RETURN) | (1L << NEW) | (1L << TRY) | (1L << THROW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0) ); + setState(142); match(RBRACK); } break; @@ -856,7 +767,7 @@ class PlanAParser extends Parser { _localctx = new SingleContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(146); + setState(144); statement(); } break; @@ -876,6 +787,9 @@ class PlanAParser extends Parser { } public static class EmptyContext extends ParserRuleContext { + public EmptyscopeContext emptyscope() { + return getRuleContext(EmptyscopeContext.class,0); + } public TerminalNode SEMICOLON() { return getToken(PlanAParser.SEMICOLON, 0); } public EmptyContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -891,11 +805,62 @@ class PlanAParser extends 
Parser { public final EmptyContext empty() throws RecognitionException { EmptyContext _localctx = new EmptyContext(_ctx, getState()); enterRule(_localctx, 6, RULE_empty); + try { + setState(149); + switch (_input.LA(1)) { + case LBRACK: + enterOuterAlt(_localctx, 1); + { + setState(147); + emptyscope(); + } + break; + case SEMICOLON: + enterOuterAlt(_localctx, 2); + { + setState(148); + match(SEMICOLON); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class EmptyscopeContext extends ParserRuleContext { + public TerminalNode LBRACK() { return getToken(PlanAParser.LBRACK, 0); } + public TerminalNode RBRACK() { return getToken(PlanAParser.RBRACK, 0); } + public EmptyscopeContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_emptyscope; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitEmptyscope(this); + else return visitor.visitChildren(this); + } + } + + public final EmptyscopeContext emptyscope() throws RecognitionException { + EmptyscopeContext _localctx = new EmptyscopeContext(_ctx, getState()); + enterRule(_localctx, 8, RULE_emptyscope); try { enterOuterAlt(_localctx, 1); { - setState(149); - match(SEMICOLON); + setState(151); + match(LBRACK); + setState(152); + match(RBRACK); } } catch (RecognitionException re) { @@ -929,21 +894,21 @@ class PlanAParser extends Parser { public final InitializerContext initializer() throws RecognitionException { InitializerContext _localctx = new InitializerContext(_ctx, getState()); - enterRule(_localctx, 8, RULE_initializer); + enterRule(_localctx, 10, RULE_initializer); try { - setState(153); - switch ( 
getInterpreter().adaptivePredict(_input,18,_ctx) ) { + setState(156); + switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(151); + setState(154); declaration(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(152); + setState(155); expression(0); } break; @@ -977,11 +942,11 @@ class PlanAParser extends Parser { public final AfterthoughtContext afterthought() throws RecognitionException { AfterthoughtContext _localctx = new AfterthoughtContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_afterthought); + enterRule(_localctx, 12, RULE_afterthought); try { enterOuterAlt(_localctx, 1); { - setState(155); + setState(158); expression(0); } } @@ -1023,28 +988,28 @@ class PlanAParser extends Parser { public final DeclarationContext declaration() throws RecognitionException { DeclarationContext _localctx = new DeclarationContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_declaration); + enterRule(_localctx, 14, RULE_declaration); int _la; try { enterOuterAlt(_localctx, 1); { - setState(157); + setState(160); decltype(); - setState(158); + setState(161); declvar(); - setState(163); + setState(166); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(159); + setState(162); match(COMMA); - setState(160); + setState(163); declvar(); } } - setState(165); + setState(168); _errHandler.sync(this); _la = _input.LA(1); } @@ -1084,26 +1049,26 @@ class PlanAParser extends Parser { public final DecltypeContext decltype() throws RecognitionException { DecltypeContext _localctx = new DecltypeContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_decltype); + enterRule(_localctx, 16, RULE_decltype); int _la; try { enterOuterAlt(_localctx, 1); { - setState(166); + setState(169); match(TYPE); - setState(171); + setState(174); _errHandler.sync(this); _la = _input.LA(1); while (_la==LBRACE) { { { - setState(167); + setState(170); match(LBRACE); - setState(168); + setState(171); 
match(RBRACE); } } - setState(173); + setState(176); _errHandler.sync(this); _la = _input.LA(1); } @@ -1139,20 +1104,20 @@ class PlanAParser extends Parser { public final DeclvarContext declvar() throws RecognitionException { DeclvarContext _localctx = new DeclvarContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_declvar); + enterRule(_localctx, 18, RULE_declvar); int _la; try { enterOuterAlt(_localctx, 1); { - setState(174); - match(ID); setState(177); + match(ID); + setState(180); _la = _input.LA(1); if (_la==ASSIGN) { { - setState(175); + setState(178); match(ASSIGN); - setState(176); + setState(179); expression(0); } } @@ -1170,6 +1135,75 @@ class PlanAParser extends Parser { return _localctx; } + public static class TrapContext extends ParserRuleContext { + public TerminalNode CATCH() { return getToken(PlanAParser.CATCH, 0); } + public TerminalNode LP() { return getToken(PlanAParser.LP, 0); } + public TerminalNode RP() { return getToken(PlanAParser.RP, 0); } + public TerminalNode TYPE() { return getToken(PlanAParser.TYPE, 0); } + public TerminalNode ID() { return getToken(PlanAParser.ID, 0); } + public BlockContext block() { + return getRuleContext(BlockContext.class,0); + } + public EmptyscopeContext emptyscope() { + return getRuleContext(EmptyscopeContext.class,0); + } + public TrapContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_trap; } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof PlanAParserVisitor ) return ((PlanAParserVisitor)visitor).visitTrap(this); + else return visitor.visitChildren(this); + } + } + + public final TrapContext trap() throws RecognitionException { + TrapContext _localctx = new TrapContext(_ctx, getState()); + enterRule(_localctx, 20, RULE_trap); + try { + enterOuterAlt(_localctx, 1); + { + setState(182); + match(CATCH); + setState(183); + match(LP); + { + setState(184); + match(TYPE); + 
setState(185); + match(ID); + } + setState(187); + match(RP); + setState(190); + switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + case 1: + { + setState(188); + block(); + } + break; + case 2: + { + setState(189); + emptyscope(); + } + break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class ExpressionContext extends ParserRuleContext { public ExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -1427,29 +1461,29 @@ class PlanAParser extends Parser { int _parentState = getState(); ExpressionContext _localctx = new ExpressionContext(_ctx, _parentState); ExpressionContext _prevctx = _localctx; - int _startState = 18; - enterRecursionRule(_localctx, 18, RULE_expression, _p); + int _startState = 22; + enterRecursionRule(_localctx, 22, RULE_expression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(207); - switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { + setState(220); + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { _localctx = new UnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(180); + setState(193); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(181); + setState(194); expression(14); } break; @@ -1458,13 +1492,13 @@ class PlanAParser extends Parser { _localctx = new CastContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(182); + setState(195); match(LP); - setState(183); + setState(196); decltype(); - setState(184); + setState(197); match(RP); - setState(185); + setState(198); expression(13); } break; @@ -1473,16 +1507,16 @@ class PlanAParser extends Parser { _localctx 
= new AssignmentContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(187); + setState(200); extstart(); - setState(188); + setState(201); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ASSIGN) | (1L << AADD) | (1L << ASUB) | (1L << AMUL) | (1L << ADIV) | (1L << AREM) | (1L << AAND) | (1L << AXOR) | (1L << AOR) | (1L << ALSH) | (1L << ARSH) | (1L << AUSH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(189); + setState(202); expression(1); } break; @@ -1491,11 +1525,11 @@ class PlanAParser extends Parser { _localctx = new PrecedenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(191); + setState(204); match(LP); - setState(192); + setState(205); expression(0); - setState(193); + setState(206); match(RP); } break; @@ -1504,7 +1538,7 @@ class PlanAParser extends Parser { _localctx = new NumericContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(195); + setState(208); _la = _input.LA(1); if ( !(((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)))) != 0)) ) { _errHandler.recoverInline(this); @@ -1518,7 +1552,7 @@ class PlanAParser extends Parser { _localctx = new CharContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(196); + setState(209); match(CHAR); } break; @@ -1527,7 +1561,7 @@ class PlanAParser extends Parser { _localctx = new TrueContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(197); + setState(210); match(TRUE); } break; @@ -1536,7 +1570,7 @@ class PlanAParser extends Parser { _localctx = new FalseContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(211); match(FALSE); } break; @@ -1545,7 +1579,7 @@ class PlanAParser extends Parser { _localctx = new NullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(199); + setState(212); match(NULL); } break; @@ 
-1554,9 +1588,9 @@ class PlanAParser extends Parser { _localctx = new PostincContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(200); + setState(213); extstart(); - setState(201); + setState(214); increment(); } break; @@ -1565,9 +1599,9 @@ class PlanAParser extends Parser { _localctx = new PreincContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(203); + setState(216); increment(); - setState(204); + setState(217); extstart(); } break; @@ -1576,36 +1610,36 @@ class PlanAParser extends Parser { _localctx = new ExternalContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(206); + setState(219); extstart(); } break; } _ctx.stop = _input.LT(-1); - setState(247); + setState(260); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(245); - switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { + setState(258); + switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(209); + setState(222); if (!(precpred(_ctx, 12))) throw new FailedPredicateException(this, "precpred(_ctx, 12)"); - setState(210); + setState(223); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << MUL) | (1L << DIV) | (1L << REM))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(211); + setState(224); expression(13); } break; @@ -1613,16 +1647,16 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(212); + 
setState(225); if (!(precpred(_ctx, 11))) throw new FailedPredicateException(this, "precpred(_ctx, 11)"); - setState(213); + setState(226); _la = _input.LA(1); if ( !(_la==ADD || _la==SUB) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(214); + setState(227); expression(12); } break; @@ -1630,16 +1664,16 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(215); + setState(228); if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)"); - setState(216); + setState(229); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LSH) | (1L << RSH) | (1L << USH))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(217); + setState(230); expression(11); } break; @@ -1647,16 +1681,16 @@ class PlanAParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(218); + setState(231); if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)"); - setState(219); + setState(232); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(220); + setState(233); expression(10); } break; @@ -1664,16 +1698,16 @@ class PlanAParser extends Parser { { _localctx = new CompContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(221); + setState(234); if (!(precpred(_ctx, 8))) throw new FailedPredicateException(this, "precpred(_ctx, 8)"); - setState(222); + setState(235); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << EQR) | (1L << NE) | (1L << 
NER))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(223); + setState(236); expression(9); } break; @@ -1681,11 +1715,11 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(224); + setState(237); if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)"); - setState(225); + setState(238); match(BWAND); - setState(226); + setState(239); expression(8); } break; @@ -1693,11 +1727,11 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(227); + setState(240); if (!(precpred(_ctx, 6))) throw new FailedPredicateException(this, "precpred(_ctx, 6)"); - setState(228); + setState(241); match(BWXOR); - setState(229); + setState(242); expression(7); } break; @@ -1705,11 +1739,11 @@ class PlanAParser extends Parser { { _localctx = new BinaryContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(230); + setState(243); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(231); + setState(244); match(BWOR); - setState(232); + setState(245); expression(6); } break; @@ -1717,11 +1751,11 @@ class PlanAParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(233); + setState(246); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(234); + setState(247); match(BOOLAND); - setState(235); + setState(248); expression(5); } break; @@ -1729,11 +1763,11 @@ class PlanAParser extends Parser { { _localctx = new BoolContext(new ExpressionContext(_parentctx, 
_parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(236); + setState(249); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(237); + setState(250); match(BOOLOR); - setState(238); + setState(251); expression(4); } break; @@ -1741,24 +1775,24 @@ class PlanAParser extends Parser { { _localctx = new ConditionalContext(new ExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_expression); - setState(239); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(240); + setState(253); match(COND); - setState(241); + setState(254); expression(0); - setState(242); + setState(255); match(COLON); - setState(243); + setState(256); expression(2); } break; } } } - setState(249); + setState(262); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,24,_ctx); + _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } } } @@ -1805,49 +1839,49 @@ class PlanAParser extends Parser { public final ExtstartContext extstart() throws RecognitionException { ExtstartContext _localctx = new ExtstartContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_extstart); + enterRule(_localctx, 24, RULE_extstart); try { - setState(256); - switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + setState(269); + switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(250); + setState(263); extprec(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(251); + setState(264); extcast(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(252); + setState(265); exttype(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(253); + setState(266); extvar(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(254); + setState(267); extnew(); } break; case 6: enterOuterAlt(_localctx, 6); { - 
setState(255); + setState(268); extstring(); } break; @@ -1904,64 +1938,64 @@ class PlanAParser extends Parser { public final ExtprecContext extprec() throws RecognitionException { ExtprecContext _localctx = new ExtprecContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_extprec); + enterRule(_localctx, 26, RULE_extprec); try { enterOuterAlt(_localctx, 1); { - setState(258); + setState(271); match(LP); - setState(265); - switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { + setState(278); + switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(259); + setState(272); extprec(); } break; case 2: { - setState(260); + setState(273); extcast(); } break; case 3: { - setState(261); + setState(274); exttype(); } break; case 4: { - setState(262); + setState(275); extvar(); } break; case 5: { - setState(263); + setState(276); extnew(); } break; case 6: { - setState(264); + setState(277); extstring(); } break; } - setState(267); + setState(280); match(RP); - setState(270); - switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { + setState(283); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(268); + setState(281); extdot(); } break; case 2: { - setState(269); + setState(282); extbrace(); } break; @@ -2016,51 +2050,51 @@ class PlanAParser extends Parser { public final ExtcastContext extcast() throws RecognitionException { ExtcastContext _localctx = new ExtcastContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_extcast); + enterRule(_localctx, 28, RULE_extcast); try { enterOuterAlt(_localctx, 1); { - setState(272); + setState(285); match(LP); - setState(273); + setState(286); decltype(); - setState(274); + setState(287); match(RP); - setState(281); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + setState(294); + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(275); + setState(288); extprec(); } break; case 2: { - setState(276); + 
setState(289); extcast(); } break; case 3: { - setState(277); + setState(290); exttype(); } break; case 4: { - setState(278); + setState(291); extvar(); } break; case 5: { - setState(279); + setState(292); extnew(); } break; case 6: { - setState(280); + setState(293); extstring(); } break; @@ -2103,27 +2137,27 @@ class PlanAParser extends Parser { public final ExtbraceContext extbrace() throws RecognitionException { ExtbraceContext _localctx = new ExtbraceContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_extbrace); + enterRule(_localctx, 30, RULE_extbrace); try { enterOuterAlt(_localctx, 1); { - setState(283); + setState(296); match(LBRACE); - setState(284); + setState(297); expression(0); - setState(285); + setState(298); match(RBRACE); - setState(288); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + setState(301); + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(286); + setState(299); extdot(); } break; case 2: { - setState(287); + setState(300); extbrace(); } break; @@ -2162,23 +2196,23 @@ class PlanAParser extends Parser { public final ExtdotContext extdot() throws RecognitionException { ExtdotContext _localctx = new ExtdotContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_extdot); + enterRule(_localctx, 32, RULE_extdot); try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(303); match(DOT); - setState(293); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + setState(306); + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(291); + setState(304); extcall(); } break; case 2: { - setState(292); + setState(305); extfield(); } break; @@ -2214,13 +2248,13 @@ class PlanAParser extends Parser { public final ExttypeContext exttype() throws RecognitionException { ExttypeContext _localctx = new ExttypeContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_exttype); + enterRule(_localctx, 34, RULE_exttype); try { enterOuterAlt(_localctx, 
1); { - setState(295); + setState(308); match(TYPE); - setState(296); + setState(309); extdot(); } } @@ -2259,25 +2293,25 @@ class PlanAParser extends Parser { public final ExtcallContext extcall() throws RecognitionException { ExtcallContext _localctx = new ExtcallContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_extcall); + enterRule(_localctx, 36, RULE_extcall); try { enterOuterAlt(_localctx, 1); { - setState(298); + setState(311); match(EXTID); - setState(299); + setState(312); arguments(); - setState(302); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + setState(315); + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(300); + setState(313); extdot(); } break; case 2: { - setState(301); + setState(314); extbrace(); } break; @@ -2316,23 +2350,23 @@ class PlanAParser extends Parser { public final ExtvarContext extvar() throws RecognitionException { ExtvarContext _localctx = new ExtvarContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_extvar); + enterRule(_localctx, 38, RULE_extvar); try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(317); match(ID); - setState(307); - switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { + setState(320); + switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { case 1: { - setState(305); + setState(318); extdot(); } break; case 2: { - setState(306); + setState(319); extbrace(); } break; @@ -2372,29 +2406,29 @@ class PlanAParser extends Parser { public final ExtfieldContext extfield() throws RecognitionException { ExtfieldContext _localctx = new ExtfieldContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_extfield); + enterRule(_localctx, 40, RULE_extfield); int _la; try { enterOuterAlt(_localctx, 1); { - setState(309); + setState(322); _la = _input.LA(1); if ( !(_la==EXTINTEGER || _la==EXTID) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(312); - switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) 
{ + setState(325); + switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(310); + setState(323); extdot(); } break; case 2: { - setState(311); + setState(324); extbrace(); } break; @@ -2451,33 +2485,33 @@ class PlanAParser extends Parser { public final ExtnewContext extnew() throws RecognitionException { ExtnewContext _localctx = new ExtnewContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_extnew); + enterRule(_localctx, 42, RULE_extnew); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(314); + setState(327); match(NEW); - setState(315); + setState(328); match(TYPE); - setState(332); + setState(345); switch (_input.LA(1)) { case LP: { { - setState(316); + setState(329); arguments(); - setState(319); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + setState(332); + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(317); + setState(330); extdot(); } break; case 2: { - setState(318); + setState(331); extbrace(); } break; @@ -2488,7 +2522,7 @@ class PlanAParser extends Parser { case LBRACE: { { - setState(325); + setState(338); _errHandler.sync(this); _alt = 1; do { @@ -2496,11 +2530,11 @@ class PlanAParser extends Parser { case 1: { { - setState(321); + setState(334); match(LBRACE); - setState(322); + setState(335); expression(0); - setState(323); + setState(336); match(RBRACE); } } @@ -2508,15 +2542,15 @@ class PlanAParser extends Parser { default: throw new NoViableAltException(this); } - setState(327); + setState(340); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); - setState(330); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + setState(343); + switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: { - setState(329); + setState(342); extdot(); } break; @@ -2561,23 
+2595,23 @@ class PlanAParser extends Parser { public final ExtstringContext extstring() throws RecognitionException { ExtstringContext _localctx = new ExtstringContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_extstring); + enterRule(_localctx, 44, RULE_extstring); try { enterOuterAlt(_localctx, 1); { - setState(334); + setState(347); match(STRING); - setState(337); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + setState(350); + switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { case 1: { - setState(335); + setState(348); extdot(); } break; case 2: { - setState(336); + setState(349); extbrace(); } break; @@ -2621,40 +2655,40 @@ class PlanAParser extends Parser { public final ArgumentsContext arguments() throws RecognitionException { ArgumentsContext _localctx = new ArgumentsContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_arguments); + enterRule(_localctx, 46, RULE_arguments); int _la; try { enterOuterAlt(_localctx, 1); { { - setState(339); + setState(352); match(LP); - setState(348); + setState(361); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << LP) | (1L << NEW) | (1L << BOOLNOT) | (1L << BWNOT) | (1L << ADD) | (1L << SUB) | (1L << INCR) | (1L << DECR))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (OCTAL - 64)) | (1L << (HEX - 64)) | (1L << (INTEGER - 64)) | (1L << (DECIMAL - 64)) | (1L << (STRING - 64)) | (1L << (CHAR - 64)) | (1L << (TRUE - 64)) | (1L << (FALSE - 64)) | (1L << (NULL - 64)) | (1L << (TYPE - 64)) | (1L << (ID - 64)))) != 0)) { { - setState(340); + setState(353); expression(0); - setState(345); + setState(358); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(341); + setState(354); match(COMMA); - setState(342); + setState(355); expression(0); } } - setState(347); + setState(360); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(350); + setState(363); match(RP); } } @@ -2686,12 +2720,12 @@ class 
PlanAParser extends Parser { public final IncrementContext increment() throws RecognitionException { IncrementContext _localctx = new IncrementContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_increment); + enterRule(_localctx, 48, RULE_increment); int _la; try { enterOuterAlt(_localctx, 1); { - setState(352); + setState(365); _la = _input.LA(1); if ( !(_la==INCR || _la==DECR) ) { _errHandler.recoverInline(this); @@ -2713,7 +2747,7 @@ class PlanAParser extends Parser { public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 9: + case 11: return expression_sempred((ExpressionContext)_localctx, predIndex); } return true; @@ -2747,144 +2781,148 @@ class PlanAParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3N\u0165\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3N\u0172\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ - "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\3\2\6\2\62"+ - "\n\2\r\2\16\2\63\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3?\n\3\3\3\3\3"+ - "\3\3\3\3\3\3\3\3\5\3G\n\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3P\n\3\3\3\3\3"+ - "\3\3\5\3U\n\3\3\3\3\3\5\3Y\n\3\3\3\3\3\5\3]\n\3\3\3\3\3\3\3\5\3b\n\3\3"+ - "\3\3\3\5\3f\n\3\3\3\3\3\5\3j\n\3\3\3\3\3\5\3n\n\3\3\3\3\3\3\3\5\3s\n\3"+ - "\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\6\3~\n\3\r\3\16\3\177\3\3\3\3\3\3"+ - "\5\3\u0085\n\3\3\3\3\3\5\3\u0089\n\3\5\3\u008b\n\3\3\4\3\4\7\4\u008f\n"+ - "\4\f\4\16\4\u0092\13\4\3\4\3\4\5\4\u0096\n\4\3\5\3\5\3\6\3\6\5\6\u009c"+ - "\n\6\3\7\3\7\3\b\3\b\3\b\3\b\7\b\u00a4\n\b\f\b\16\b\u00a7\13\b\3\t\3\t"+ - "\3\t\7\t\u00ac\n\t\f\t\16\t\u00af\13\t\3\n\3\n\3\n\5\n\u00b4\n\n\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\5\13"+ 
- "\u00d2\n\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13"+ - "\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u00f8\n\13\f\13"+ - "\16\13\u00fb\13\13\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u0103\n\f\3\r\3\r\3\r\3"+ - "\r\3\r\3\r\3\r\5\r\u010c\n\r\3\r\3\r\3\r\5\r\u0111\n\r\3\16\3\16\3\16"+ - "\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u011c\n\16\3\17\3\17\3\17\3\17\3\17"+ - "\5\17\u0123\n\17\3\20\3\20\3\20\5\20\u0128\n\20\3\21\3\21\3\21\3\22\3"+ - "\22\3\22\3\22\5\22\u0131\n\22\3\23\3\23\3\23\5\23\u0136\n\23\3\24\3\24"+ - "\3\24\5\24\u013b\n\24\3\25\3\25\3\25\3\25\3\25\5\25\u0142\n\25\3\25\3"+ - "\25\3\25\3\25\6\25\u0148\n\25\r\25\16\25\u0149\3\25\5\25\u014d\n\25\5"+ - "\25\u014f\n\25\3\26\3\26\3\26\5\26\u0154\n\26\3\27\3\27\3\27\3\27\7\27"+ - "\u015a\n\27\f\27\16\27\u015d\13\27\5\27\u015f\n\27\3\27\3\27\3\30\3\30"+ - "\3\30\2\3\24\31\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\2\f\4"+ - "\2\32\33\37 \3\2\65@\3\2BE\3\2\34\36\3\2\37 \3\2!#\3\2$\'\3\2(+\3\2MN"+ - "\3\2\63\64\u01a5\2\61\3\2\2\2\4\u008a\3\2\2\2\6\u0095\3\2\2\2\b\u0097"+ - "\3\2\2\2\n\u009b\3\2\2\2\f\u009d\3\2\2\2\16\u009f\3\2\2\2\20\u00a8\3\2"+ - "\2\2\22\u00b0\3\2\2\2\24\u00d1\3\2\2\2\26\u0102\3\2\2\2\30\u0104\3\2\2"+ - "\2\32\u0112\3\2\2\2\34\u011d\3\2\2\2\36\u0124\3\2\2\2 \u0129\3\2\2\2\""+ - "\u012c\3\2\2\2$\u0132\3\2\2\2&\u0137\3\2\2\2(\u013c\3\2\2\2*\u0150\3\2"+ - "\2\2,\u0155\3\2\2\2.\u0162\3\2\2\2\60\62\5\4\3\2\61\60\3\2\2\2\62\63\3"+ - "\2\2\2\63\61\3\2\2\2\63\64\3\2\2\2\64\65\3\2\2\2\65\66\7\2\2\3\66\3\3"+ - "\2\2\2\678\7\16\2\289\7\t\2\29:\5\24\13\2:;\7\n\2\2;>\5\6\4\2<=\7\17\2"+ - "\2=?\5\6\4\2><\3\2\2\2>?\3\2\2\2?\u008b\3\2\2\2@A\7\20\2\2AB\7\t\2\2B"+ - "C\5\24\13\2CF\7\n\2\2DG\5\6\4\2EG\5\b\5\2FD\3\2\2\2FE\3\2\2\2G\u008b\3"+ - "\2\2\2HI\7\21\2\2IJ\5\6\4\2JK\7\20\2\2KL\7\t\2\2LM\5\24\13\2MO\7\n\2\2"+ - "NP\7\r\2\2ON\3\2\2\2OP\3\2\2\2P\u008b\3\2\2\2QR\7\22\2\2RT\7\t\2\2SU\5"+ - 
"\n\6\2TS\3\2\2\2TU\3\2\2\2UV\3\2\2\2VX\7\r\2\2WY\5\24\13\2XW\3\2\2\2X"+ - "Y\3\2\2\2YZ\3\2\2\2Z\\\7\r\2\2[]\5\f\7\2\\[\3\2\2\2\\]\3\2\2\2]^\3\2\2"+ - "\2^a\7\n\2\2_b\5\6\4\2`b\5\b\5\2a_\3\2\2\2a`\3\2\2\2b\u008b\3\2\2\2ce"+ - "\5\16\b\2df\7\r\2\2ed\3\2\2\2ef\3\2\2\2f\u008b\3\2\2\2gi\7\23\2\2hj\7"+ - "\r\2\2ih\3\2\2\2ij\3\2\2\2j\u008b\3\2\2\2km\7\24\2\2ln\7\r\2\2ml\3\2\2"+ - "\2mn\3\2\2\2n\u008b\3\2\2\2op\7\25\2\2pr\5\24\13\2qs\7\r\2\2rq\3\2\2\2"+ - "rs\3\2\2\2s\u008b\3\2\2\2tu\7\27\2\2u}\5\6\4\2vw\7\30\2\2wx\7\t\2\2xy"+ - "\7K\2\2yz\7L\2\2z{\3\2\2\2{|\7\n\2\2|~\5\6\4\2}v\3\2\2\2~\177\3\2\2\2"+ - "\177}\3\2\2\2\177\u0080\3\2\2\2\u0080\u008b\3\2\2\2\u0081\u0082\7\31\2"+ - "\2\u0082\u0084\5\24\13\2\u0083\u0085\7\r\2\2\u0084\u0083\3\2\2\2\u0084"+ - "\u0085\3\2\2\2\u0085\u008b\3\2\2\2\u0086\u0088\5\24\13\2\u0087\u0089\7"+ - "\r\2\2\u0088\u0087\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u008b\3\2\2\2\u008a"+ - "\67\3\2\2\2\u008a@\3\2\2\2\u008aH\3\2\2\2\u008aQ\3\2\2\2\u008ac\3\2\2"+ - "\2\u008ag\3\2\2\2\u008ak\3\2\2\2\u008ao\3\2\2\2\u008at\3\2\2\2\u008a\u0081"+ - "\3\2\2\2\u008a\u0086\3\2\2\2\u008b\5\3\2\2\2\u008c\u0090\7\5\2\2\u008d"+ - "\u008f\5\4\3\2\u008e\u008d\3\2\2\2\u008f\u0092\3\2\2\2\u0090\u008e\3\2"+ - "\2\2\u0090\u0091\3\2\2\2\u0091\u0093\3\2\2\2\u0092\u0090\3\2\2\2\u0093"+ - "\u0096\7\6\2\2\u0094\u0096\5\4\3\2\u0095\u008c\3\2\2\2\u0095\u0094\3\2"+ - "\2\2\u0096\7\3\2\2\2\u0097\u0098\7\r\2\2\u0098\t\3\2\2\2\u0099\u009c\5"+ - "\16\b\2\u009a\u009c\5\24\13\2\u009b\u0099\3\2\2\2\u009b\u009a\3\2\2\2"+ - "\u009c\13\3\2\2\2\u009d\u009e\5\24\13\2\u009e\r\3\2\2\2\u009f\u00a0\5"+ - "\20\t\2\u00a0\u00a5\5\22\n\2\u00a1\u00a2\7\f\2\2\u00a2\u00a4\5\22\n\2"+ - "\u00a3\u00a1\3\2\2\2\u00a4\u00a7\3\2\2\2\u00a5\u00a3\3\2\2\2\u00a5\u00a6"+ - "\3\2\2\2\u00a6\17\3\2\2\2\u00a7\u00a5\3\2\2\2\u00a8\u00ad\7K\2\2\u00a9"+ - "\u00aa\7\7\2\2\u00aa\u00ac\7\b\2\2\u00ab\u00a9\3\2\2\2\u00ac\u00af\3\2"+ - "\2\2\u00ad\u00ab\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\21\3\2\2\2\u00af\u00ad"+ - 
"\3\2\2\2\u00b0\u00b3\7L\2\2\u00b1\u00b2\7\65\2\2\u00b2\u00b4\5\24\13\2"+ - "\u00b3\u00b1\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\23\3\2\2\2\u00b5\u00b6"+ - "\b\13\1\2\u00b6\u00b7\t\2\2\2\u00b7\u00d2\5\24\13\20\u00b8\u00b9\7\t\2"+ - "\2\u00b9\u00ba\5\20\t\2\u00ba\u00bb\7\n\2\2\u00bb\u00bc\5\24\13\17\u00bc"+ - "\u00d2\3\2\2\2\u00bd\u00be\5\26\f\2\u00be\u00bf\t\3\2\2\u00bf\u00c0\5"+ - "\24\13\3\u00c0\u00d2\3\2\2\2\u00c1\u00c2\7\t\2\2\u00c2\u00c3\5\24\13\2"+ - "\u00c3\u00c4\7\n\2\2\u00c4\u00d2\3\2\2\2\u00c5\u00d2\t\4\2\2\u00c6\u00d2"+ - "\7G\2\2\u00c7\u00d2\7H\2\2\u00c8\u00d2\7I\2\2\u00c9\u00d2\7J\2\2\u00ca"+ - "\u00cb\5\26\f\2\u00cb\u00cc\5.\30\2\u00cc\u00d2\3\2\2\2\u00cd\u00ce\5"+ - ".\30\2\u00ce\u00cf\5\26\f\2\u00cf\u00d2\3\2\2\2\u00d0\u00d2\5\26\f\2\u00d1"+ - "\u00b5\3\2\2\2\u00d1\u00b8\3\2\2\2\u00d1\u00bd\3\2\2\2\u00d1\u00c1\3\2"+ - "\2\2\u00d1\u00c5\3\2\2\2\u00d1\u00c6\3\2\2\2\u00d1\u00c7\3\2\2\2\u00d1"+ - "\u00c8\3\2\2\2\u00d1\u00c9\3\2\2\2\u00d1\u00ca\3\2\2\2\u00d1\u00cd\3\2"+ - "\2\2\u00d1\u00d0\3\2\2\2\u00d2\u00f9\3\2\2\2\u00d3\u00d4\f\16\2\2\u00d4"+ - "\u00d5\t\5\2\2\u00d5\u00f8\5\24\13\17\u00d6\u00d7\f\r\2\2\u00d7\u00d8"+ - "\t\6\2\2\u00d8\u00f8\5\24\13\16\u00d9\u00da\f\f\2\2\u00da\u00db\t\7\2"+ - "\2\u00db\u00f8\5\24\13\r\u00dc\u00dd\f\13\2\2\u00dd\u00de\t\b\2\2\u00de"+ - "\u00f8\5\24\13\f\u00df\u00e0\f\n\2\2\u00e0\u00e1\t\t\2\2\u00e1\u00f8\5"+ - "\24\13\13\u00e2\u00e3\f\t\2\2\u00e3\u00e4\7,\2\2\u00e4\u00f8\5\24\13\n"+ - "\u00e5\u00e6\f\b\2\2\u00e6\u00e7\7-\2\2\u00e7\u00f8\5\24\13\t\u00e8\u00e9"+ - "\f\7\2\2\u00e9\u00ea\7.\2\2\u00ea\u00f8\5\24\13\b\u00eb\u00ec\f\6\2\2"+ - "\u00ec\u00ed\7/\2\2\u00ed\u00f8\5\24\13\7\u00ee\u00ef\f\5\2\2\u00ef\u00f0"+ - "\7\60\2\2\u00f0\u00f8\5\24\13\6\u00f1\u00f2\f\4\2\2\u00f2\u00f3\7\61\2"+ - "\2\u00f3\u00f4\5\24\13\2\u00f4\u00f5\7\62\2\2\u00f5\u00f6\5\24\13\4\u00f6"+ - "\u00f8\3\2\2\2\u00f7\u00d3\3\2\2\2\u00f7\u00d6\3\2\2\2\u00f7\u00d9\3\2"+ - "\2\2\u00f7\u00dc\3\2\2\2\u00f7\u00df\3\2\2\2\u00f7\u00e2\3\2\2\2\u00f7"+ - 
"\u00e5\3\2\2\2\u00f7\u00e8\3\2\2\2\u00f7\u00eb\3\2\2\2\u00f7\u00ee\3\2"+ - "\2\2\u00f7\u00f1\3\2\2\2\u00f8\u00fb\3\2\2\2\u00f9\u00f7\3\2\2\2\u00f9"+ - "\u00fa\3\2\2\2\u00fa\25\3\2\2\2\u00fb\u00f9\3\2\2\2\u00fc\u0103\5\30\r"+ - "\2\u00fd\u0103\5\32\16\2\u00fe\u0103\5 \21\2\u00ff\u0103\5$\23\2\u0100"+ - "\u0103\5(\25\2\u0101\u0103\5*\26\2\u0102\u00fc\3\2\2\2\u0102\u00fd\3\2"+ - "\2\2\u0102\u00fe\3\2\2\2\u0102\u00ff\3\2\2\2\u0102\u0100\3\2\2\2\u0102"+ - "\u0101\3\2\2\2\u0103\27\3\2\2\2\u0104\u010b\7\t\2\2\u0105\u010c\5\30\r"+ - "\2\u0106\u010c\5\32\16\2\u0107\u010c\5 \21\2\u0108\u010c\5$\23\2\u0109"+ - "\u010c\5(\25\2\u010a\u010c\5*\26\2\u010b\u0105\3\2\2\2\u010b\u0106\3\2"+ - "\2\2\u010b\u0107\3\2\2\2\u010b\u0108\3\2\2\2\u010b\u0109\3\2\2\2\u010b"+ - "\u010a\3\2\2\2\u010c\u010d\3\2\2\2\u010d\u0110\7\n\2\2\u010e\u0111\5\36"+ - "\20\2\u010f\u0111\5\34\17\2\u0110\u010e\3\2\2\2\u0110\u010f\3\2\2\2\u0110"+ - "\u0111\3\2\2\2\u0111\31\3\2\2\2\u0112\u0113\7\t\2\2\u0113\u0114\5\20\t"+ - "\2\u0114\u011b\7\n\2\2\u0115\u011c\5\30\r\2\u0116\u011c\5\32\16\2\u0117"+ - "\u011c\5 \21\2\u0118\u011c\5$\23\2\u0119\u011c\5(\25\2\u011a\u011c\5*"+ - "\26\2\u011b\u0115\3\2\2\2\u011b\u0116\3\2\2\2\u011b\u0117\3\2\2\2\u011b"+ - "\u0118\3\2\2\2\u011b\u0119\3\2\2\2\u011b\u011a\3\2\2\2\u011c\33\3\2\2"+ - "\2\u011d\u011e\7\7\2\2\u011e\u011f\5\24\13\2\u011f\u0122\7\b\2\2\u0120"+ - "\u0123\5\36\20\2\u0121\u0123\5\34\17\2\u0122\u0120\3\2\2\2\u0122\u0121"+ - "\3\2\2\2\u0122\u0123\3\2\2\2\u0123\35\3\2\2\2\u0124\u0127\7\13\2\2\u0125"+ - "\u0128\5\"\22\2\u0126\u0128\5&\24\2\u0127\u0125\3\2\2\2\u0127\u0126\3"+ - "\2\2\2\u0128\37\3\2\2\2\u0129\u012a\7K\2\2\u012a\u012b\5\36\20\2\u012b"+ - "!\3\2\2\2\u012c\u012d\7N\2\2\u012d\u0130\5,\27\2\u012e\u0131\5\36\20\2"+ - "\u012f\u0131\5\34\17\2\u0130\u012e\3\2\2\2\u0130\u012f\3\2\2\2\u0130\u0131"+ - "\3\2\2\2\u0131#\3\2\2\2\u0132\u0135\7L\2\2\u0133\u0136\5\36\20\2\u0134"+ - "\u0136\5\34\17\2\u0135\u0133\3\2\2\2\u0135\u0134\3\2\2\2\u0135\u0136\3"+ - 
"\2\2\2\u0136%\3\2\2\2\u0137\u013a\t\n\2\2\u0138\u013b\5\36\20\2\u0139"+ - "\u013b\5\34\17\2\u013a\u0138\3\2\2\2\u013a\u0139\3\2\2\2\u013a\u013b\3"+ - "\2\2\2\u013b\'\3\2\2\2\u013c\u013d\7\26\2\2\u013d\u014e\7K\2\2\u013e\u0141"+ - "\5,\27\2\u013f\u0142\5\36\20\2\u0140\u0142\5\34\17\2\u0141\u013f\3\2\2"+ - "\2\u0141\u0140\3\2\2\2\u0141\u0142\3\2\2\2\u0142\u014f\3\2\2\2\u0143\u0144"+ - "\7\7\2\2\u0144\u0145\5\24\13\2\u0145\u0146\7\b\2\2\u0146\u0148\3\2\2\2"+ - "\u0147\u0143\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u0147\3\2\2\2\u0149\u014a"+ - "\3\2\2\2\u014a\u014c\3\2\2\2\u014b\u014d\5\36\20\2\u014c\u014b\3\2\2\2"+ - "\u014c\u014d\3\2\2\2\u014d\u014f\3\2\2\2\u014e\u013e\3\2\2\2\u014e\u0147"+ - "\3\2\2\2\u014f)\3\2\2\2\u0150\u0153\7F\2\2\u0151\u0154\5\36\20\2\u0152"+ - "\u0154\5\34\17\2\u0153\u0151\3\2\2\2\u0153\u0152\3\2\2\2\u0153\u0154\3"+ - "\2\2\2\u0154+\3\2\2\2\u0155\u015e\7\t\2\2\u0156\u015b\5\24\13\2\u0157"+ - "\u0158\7\f\2\2\u0158\u015a\5\24\13\2\u0159\u0157\3\2\2\2\u015a\u015d\3"+ - "\2\2\2\u015b\u0159\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u015f\3\2\2\2\u015d"+ - "\u015b\3\2\2\2\u015e\u0156\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0160\3\2"+ - "\2\2\u0160\u0161\7\n\2\2\u0161-\3\2\2\2\u0162\u0163\t\13\2\2\u0163/\3"+ - "\2\2\2+\63>FOTX\\aeimr\177\u0084\u0088\u008a\u0090\u0095\u009b\u00a5\u00ad"+ - "\u00b3\u00d1\u00f7\u00f9\u0102\u010b\u0110\u011b\u0122\u0127\u0130\u0135"+ - "\u013a\u0141\u0149\u014c\u014e\u0153\u015b\u015e"; + "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ + "\4\32\t\32\3\2\6\2\66\n\2\r\2\16\2\67\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3"+ + "\3\3\5\3C\n\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3K\n\3\3\3\3\3\3\3\3\3\3\3\3\3"+ + "\3\3\5\3T\n\3\3\3\3\3\3\3\5\3Y\n\3\3\3\3\3\5\3]\n\3\3\3\3\3\5\3a\n\3\3"+ + "\3\3\3\3\3\5\3f\n\3\3\3\3\3\5\3j\n\3\3\3\3\3\5\3n\n\3\3\3\3\3\5\3r\n\3"+ + "\3\3\3\3\3\3\5\3w\n\3\3\3\3\3\3\3\6\3|\n\3\r\3\16\3}\3\3\3\3\3\3\5\3\u0083"+ + "\n\3\3\3\3\3\5\3\u0087\n\3\5\3\u0089\n\3\3\4\3\4\6\4\u008d\n\4\r\4\16"+ + 
"\4\u008e\3\4\3\4\3\4\5\4\u0094\n\4\3\5\3\5\5\5\u0098\n\5\3\6\3\6\3\6\3"+ + "\7\3\7\5\7\u009f\n\7\3\b\3\b\3\t\3\t\3\t\3\t\7\t\u00a7\n\t\f\t\16\t\u00aa"+ + "\13\t\3\n\3\n\3\n\7\n\u00af\n\n\f\n\16\n\u00b2\13\n\3\13\3\13\3\13\5\13"+ + "\u00b7\n\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u00c1\n\f\3\r\3\r\3\r"+ + "\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ + "\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u00df\n\r\3\r\3\r\3\r\3\r\3\r\3\r\3"+ + "\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r"+ + "\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\7\r\u0105\n\r\f\r\16"+ + "\r\u0108\13\r\3\16\3\16\3\16\3\16\3\16\3\16\5\16\u0110\n\16\3\17\3\17"+ + "\3\17\3\17\3\17\3\17\3\17\5\17\u0119\n\17\3\17\3\17\3\17\5\17\u011e\n"+ + "\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\5\20\u0129\n\20\3\21"+ + "\3\21\3\21\3\21\3\21\5\21\u0130\n\21\3\22\3\22\3\22\5\22\u0135\n\22\3"+ + "\23\3\23\3\23\3\24\3\24\3\24\3\24\5\24\u013e\n\24\3\25\3\25\3\25\5\25"+ + "\u0143\n\25\3\26\3\26\3\26\5\26\u0148\n\26\3\27\3\27\3\27\3\27\3\27\5"+ + "\27\u014f\n\27\3\27\3\27\3\27\3\27\6\27\u0155\n\27\r\27\16\27\u0156\3"+ + "\27\5\27\u015a\n\27\5\27\u015c\n\27\3\30\3\30\3\30\5\30\u0161\n\30\3\31"+ + "\3\31\3\31\3\31\7\31\u0167\n\31\f\31\16\31\u016a\13\31\5\31\u016c\n\31"+ + "\3\31\3\31\3\32\3\32\3\32\2\3\30\33\2\4\6\b\n\f\16\20\22\24\26\30\32\34"+ + "\36 \"$&(*,.\60\62\2\f\4\2\32\33\37 \3\2\65@\3\2BE\3\2\34\36\3\2\37 \3"+ + "\2!#\3\2$\'\3\2(+\3\2MN\3\2\63\64\u01b2\2\65\3\2\2\2\4\u0088\3\2\2\2\6"+ + "\u0093\3\2\2\2\b\u0097\3\2\2\2\n\u0099\3\2\2\2\f\u009e\3\2\2\2\16\u00a0"+ + "\3\2\2\2\20\u00a2\3\2\2\2\22\u00ab\3\2\2\2\24\u00b3\3\2\2\2\26\u00b8\3"+ + "\2\2\2\30\u00de\3\2\2\2\32\u010f\3\2\2\2\34\u0111\3\2\2\2\36\u011f\3\2"+ + "\2\2 \u012a\3\2\2\2\"\u0131\3\2\2\2$\u0136\3\2\2\2&\u0139\3\2\2\2(\u013f"+ + "\3\2\2\2*\u0144\3\2\2\2,\u0149\3\2\2\2.\u015d\3\2\2\2\60\u0162\3\2\2\2"+ + "\62\u016f\3\2\2\2\64\66\5\4\3\2\65\64\3\2\2\2\66\67\3\2\2\2\67\65\3\2"+ + 
"\2\2\678\3\2\2\289\3\2\2\29:\7\2\2\3:\3\3\2\2\2;<\7\16\2\2<=\7\t\2\2="+ + ">\5\30\r\2>?\7\n\2\2?B\5\6\4\2@A\7\17\2\2AC\5\6\4\2B@\3\2\2\2BC\3\2\2"+ + "\2C\u0089\3\2\2\2DE\7\20\2\2EF\7\t\2\2FG\5\30\r\2GJ\7\n\2\2HK\5\6\4\2"+ + "IK\5\b\5\2JH\3\2\2\2JI\3\2\2\2K\u0089\3\2\2\2LM\7\21\2\2MN\5\6\4\2NO\7"+ + "\20\2\2OP\7\t\2\2PQ\5\30\r\2QS\7\n\2\2RT\7\r\2\2SR\3\2\2\2ST\3\2\2\2T"+ + "\u0089\3\2\2\2UV\7\22\2\2VX\7\t\2\2WY\5\f\7\2XW\3\2\2\2XY\3\2\2\2YZ\3"+ + "\2\2\2Z\\\7\r\2\2[]\5\30\r\2\\[\3\2\2\2\\]\3\2\2\2]^\3\2\2\2^`\7\r\2\2"+ + "_a\5\16\b\2`_\3\2\2\2`a\3\2\2\2ab\3\2\2\2be\7\n\2\2cf\5\6\4\2df\5\b\5"+ + "\2ec\3\2\2\2ed\3\2\2\2f\u0089\3\2\2\2gi\5\20\t\2hj\7\r\2\2ih\3\2\2\2i"+ + "j\3\2\2\2j\u0089\3\2\2\2km\7\23\2\2ln\7\r\2\2ml\3\2\2\2mn\3\2\2\2n\u0089"+ + "\3\2\2\2oq\7\24\2\2pr\7\r\2\2qp\3\2\2\2qr\3\2\2\2r\u0089\3\2\2\2st\7\25"+ + "\2\2tv\5\30\r\2uw\7\r\2\2vu\3\2\2\2vw\3\2\2\2w\u0089\3\2\2\2xy\7\27\2"+ + "\2y{\5\6\4\2z|\5\26\f\2{z\3\2\2\2|}\3\2\2\2}{\3\2\2\2}~\3\2\2\2~\u0089"+ + "\3\2\2\2\177\u0080\7\31\2\2\u0080\u0082\5\30\r\2\u0081\u0083\7\r\2\2\u0082"+ + "\u0081\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0089\3\2\2\2\u0084\u0086\5\30"+ + "\r\2\u0085\u0087\7\r\2\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087"+ + "\u0089\3\2\2\2\u0088;\3\2\2\2\u0088D\3\2\2\2\u0088L\3\2\2\2\u0088U\3\2"+ + "\2\2\u0088g\3\2\2\2\u0088k\3\2\2\2\u0088o\3\2\2\2\u0088s\3\2\2\2\u0088"+ + "x\3\2\2\2\u0088\177\3\2\2\2\u0088\u0084\3\2\2\2\u0089\5\3\2\2\2\u008a"+ + "\u008c\7\5\2\2\u008b\u008d\5\4\3\2\u008c\u008b\3\2\2\2\u008d\u008e\3\2"+ + "\2\2\u008e\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0090\3\2\2\2\u0090"+ + "\u0091\7\6\2\2\u0091\u0094\3\2\2\2\u0092\u0094\5\4\3\2\u0093\u008a\3\2"+ + "\2\2\u0093\u0092\3\2\2\2\u0094\7\3\2\2\2\u0095\u0098\5\n\6\2\u0096\u0098"+ + "\7\r\2\2\u0097\u0095\3\2\2\2\u0097\u0096\3\2\2\2\u0098\t\3\2\2\2\u0099"+ + "\u009a\7\5\2\2\u009a\u009b\7\6\2\2\u009b\13\3\2\2\2\u009c\u009f\5\20\t"+ + "\2\u009d\u009f\5\30\r\2\u009e\u009c\3\2\2\2\u009e\u009d\3\2\2\2\u009f"+ + 
"\r\3\2\2\2\u00a0\u00a1\5\30\r\2\u00a1\17\3\2\2\2\u00a2\u00a3\5\22\n\2"+ + "\u00a3\u00a8\5\24\13\2\u00a4\u00a5\7\f\2\2\u00a5\u00a7\5\24\13\2\u00a6"+ + "\u00a4\3\2\2\2\u00a7\u00aa\3\2\2\2\u00a8\u00a6\3\2\2\2\u00a8\u00a9\3\2"+ + "\2\2\u00a9\21\3\2\2\2\u00aa\u00a8\3\2\2\2\u00ab\u00b0\7K\2\2\u00ac\u00ad"+ + "\7\7\2\2\u00ad\u00af\7\b\2\2\u00ae\u00ac\3\2\2\2\u00af\u00b2\3\2\2\2\u00b0"+ + "\u00ae\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\23\3\2\2\2\u00b2\u00b0\3\2\2"+ + "\2\u00b3\u00b6\7L\2\2\u00b4\u00b5\7\65\2\2\u00b5\u00b7\5\30\r\2\u00b6"+ + "\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\25\3\2\2\2\u00b8\u00b9\7\30\2"+ + "\2\u00b9\u00ba\7\t\2\2\u00ba\u00bb\7K\2\2\u00bb\u00bc\7L\2\2\u00bc\u00bd"+ + "\3\2\2\2\u00bd\u00c0\7\n\2\2\u00be\u00c1\5\6\4\2\u00bf\u00c1\5\n\6\2\u00c0"+ + "\u00be\3\2\2\2\u00c0\u00bf\3\2\2\2\u00c1\27\3\2\2\2\u00c2\u00c3\b\r\1"+ + "\2\u00c3\u00c4\t\2\2\2\u00c4\u00df\5\30\r\20\u00c5\u00c6\7\t\2\2\u00c6"+ + "\u00c7\5\22\n\2\u00c7\u00c8\7\n\2\2\u00c8\u00c9\5\30\r\17\u00c9\u00df"+ + "\3\2\2\2\u00ca\u00cb\5\32\16\2\u00cb\u00cc\t\3\2\2\u00cc\u00cd\5\30\r"+ + "\3\u00cd\u00df\3\2\2\2\u00ce\u00cf\7\t\2\2\u00cf\u00d0\5\30\r\2\u00d0"+ + "\u00d1\7\n\2\2\u00d1\u00df\3\2\2\2\u00d2\u00df\t\4\2\2\u00d3\u00df\7G"+ + "\2\2\u00d4\u00df\7H\2\2\u00d5\u00df\7I\2\2\u00d6\u00df\7J\2\2\u00d7\u00d8"+ + "\5\32\16\2\u00d8\u00d9\5\62\32\2\u00d9\u00df\3\2\2\2\u00da\u00db\5\62"+ + "\32\2\u00db\u00dc\5\32\16\2\u00dc\u00df\3\2\2\2\u00dd\u00df\5\32\16\2"+ + "\u00de\u00c2\3\2\2\2\u00de\u00c5\3\2\2\2\u00de\u00ca\3\2\2\2\u00de\u00ce"+ + "\3\2\2\2\u00de\u00d2\3\2\2\2\u00de\u00d3\3\2\2\2\u00de\u00d4\3\2\2\2\u00de"+ + "\u00d5\3\2\2\2\u00de\u00d6\3\2\2\2\u00de\u00d7\3\2\2\2\u00de\u00da\3\2"+ + "\2\2\u00de\u00dd\3\2\2\2\u00df\u0106\3\2\2\2\u00e0\u00e1\f\16\2\2\u00e1"+ + "\u00e2\t\5\2\2\u00e2\u0105\5\30\r\17\u00e3\u00e4\f\r\2\2\u00e4\u00e5\t"+ + "\6\2\2\u00e5\u0105\5\30\r\16\u00e6\u00e7\f\f\2\2\u00e7\u00e8\t\7\2\2\u00e8"+ + "\u0105\5\30\r\r\u00e9\u00ea\f\13\2\2\u00ea\u00eb\t\b\2\2\u00eb\u0105\5"+ + 
"\30\r\f\u00ec\u00ed\f\n\2\2\u00ed\u00ee\t\t\2\2\u00ee\u0105\5\30\r\13"+ + "\u00ef\u00f0\f\t\2\2\u00f0\u00f1\7,\2\2\u00f1\u0105\5\30\r\n\u00f2\u00f3"+ + "\f\b\2\2\u00f3\u00f4\7-\2\2\u00f4\u0105\5\30\r\t\u00f5\u00f6\f\7\2\2\u00f6"+ + "\u00f7\7.\2\2\u00f7\u0105\5\30\r\b\u00f8\u00f9\f\6\2\2\u00f9\u00fa\7/"+ + "\2\2\u00fa\u0105\5\30\r\7\u00fb\u00fc\f\5\2\2\u00fc\u00fd\7\60\2\2\u00fd"+ + "\u0105\5\30\r\6\u00fe\u00ff\f\4\2\2\u00ff\u0100\7\61\2\2\u0100\u0101\5"+ + "\30\r\2\u0101\u0102\7\62\2\2\u0102\u0103\5\30\r\4\u0103\u0105\3\2\2\2"+ + "\u0104\u00e0\3\2\2\2\u0104\u00e3\3\2\2\2\u0104\u00e6\3\2\2\2\u0104\u00e9"+ + "\3\2\2\2\u0104\u00ec\3\2\2\2\u0104\u00ef\3\2\2\2\u0104\u00f2\3\2\2\2\u0104"+ + "\u00f5\3\2\2\2\u0104\u00f8\3\2\2\2\u0104\u00fb\3\2\2\2\u0104\u00fe\3\2"+ + "\2\2\u0105\u0108\3\2\2\2\u0106\u0104\3\2\2\2\u0106\u0107\3\2\2\2\u0107"+ + "\31\3\2\2\2\u0108\u0106\3\2\2\2\u0109\u0110\5\34\17\2\u010a\u0110\5\36"+ + "\20\2\u010b\u0110\5$\23\2\u010c\u0110\5(\25\2\u010d\u0110\5,\27\2\u010e"+ + "\u0110\5.\30\2\u010f\u0109\3\2\2\2\u010f\u010a\3\2\2\2\u010f\u010b\3\2"+ + "\2\2\u010f\u010c\3\2\2\2\u010f\u010d\3\2\2\2\u010f\u010e\3\2\2\2\u0110"+ + "\33\3\2\2\2\u0111\u0118\7\t\2\2\u0112\u0119\5\34\17\2\u0113\u0119\5\36"+ + "\20\2\u0114\u0119\5$\23\2\u0115\u0119\5(\25\2\u0116\u0119\5,\27\2\u0117"+ + "\u0119\5.\30\2\u0118\u0112\3\2\2\2\u0118\u0113\3\2\2\2\u0118\u0114\3\2"+ + "\2\2\u0118\u0115\3\2\2\2\u0118\u0116\3\2\2\2\u0118\u0117\3\2\2\2\u0119"+ + "\u011a\3\2\2\2\u011a\u011d\7\n\2\2\u011b\u011e\5\"\22\2\u011c\u011e\5"+ + " \21\2\u011d\u011b\3\2\2\2\u011d\u011c\3\2\2\2\u011d\u011e\3\2\2\2\u011e"+ + "\35\3\2\2\2\u011f\u0120\7\t\2\2\u0120\u0121\5\22\n\2\u0121\u0128\7\n\2"+ + "\2\u0122\u0129\5\34\17\2\u0123\u0129\5\36\20\2\u0124\u0129\5$\23\2\u0125"+ + "\u0129\5(\25\2\u0126\u0129\5,\27\2\u0127\u0129\5.\30\2\u0128\u0122\3\2"+ + "\2\2\u0128\u0123\3\2\2\2\u0128\u0124\3\2\2\2\u0128\u0125\3\2\2\2\u0128"+ + "\u0126\3\2\2\2\u0128\u0127\3\2\2\2\u0129\37\3\2\2\2\u012a\u012b\7\7\2"+ + 
"\2\u012b\u012c\5\30\r\2\u012c\u012f\7\b\2\2\u012d\u0130\5\"\22\2\u012e"+ + "\u0130\5 \21\2\u012f\u012d\3\2\2\2\u012f\u012e\3\2\2\2\u012f\u0130\3\2"+ + "\2\2\u0130!\3\2\2\2\u0131\u0134\7\13\2\2\u0132\u0135\5&\24\2\u0133\u0135"+ + "\5*\26\2\u0134\u0132\3\2\2\2\u0134\u0133\3\2\2\2\u0135#\3\2\2\2\u0136"+ + "\u0137\7K\2\2\u0137\u0138\5\"\22\2\u0138%\3\2\2\2\u0139\u013a\7N\2\2\u013a"+ + "\u013d\5\60\31\2\u013b\u013e\5\"\22\2\u013c\u013e\5 \21\2\u013d\u013b"+ + "\3\2\2\2\u013d\u013c\3\2\2\2\u013d\u013e\3\2\2\2\u013e\'\3\2\2\2\u013f"+ + "\u0142\7L\2\2\u0140\u0143\5\"\22\2\u0141\u0143\5 \21\2\u0142\u0140\3\2"+ + "\2\2\u0142\u0141\3\2\2\2\u0142\u0143\3\2\2\2\u0143)\3\2\2\2\u0144\u0147"+ + "\t\n\2\2\u0145\u0148\5\"\22\2\u0146\u0148\5 \21\2\u0147\u0145\3\2\2\2"+ + "\u0147\u0146\3\2\2\2\u0147\u0148\3\2\2\2\u0148+\3\2\2\2\u0149\u014a\7"+ + "\26\2\2\u014a\u015b\7K\2\2\u014b\u014e\5\60\31\2\u014c\u014f\5\"\22\2"+ + "\u014d\u014f\5 \21\2\u014e\u014c\3\2\2\2\u014e\u014d\3\2\2\2\u014e\u014f"+ + "\3\2\2\2\u014f\u015c\3\2\2\2\u0150\u0151\7\7\2\2\u0151\u0152\5\30\r\2"+ + "\u0152\u0153\7\b\2\2\u0153\u0155\3\2\2\2\u0154\u0150\3\2\2\2\u0155\u0156"+ + "\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0157\3\2\2\2\u0157\u0159\3\2\2\2\u0158"+ + "\u015a\5\"\22\2\u0159\u0158\3\2\2\2\u0159\u015a\3\2\2\2\u015a\u015c\3"+ + "\2\2\2\u015b\u014b\3\2\2\2\u015b\u0154\3\2\2\2\u015c-\3\2\2\2\u015d\u0160"+ + "\7F\2\2\u015e\u0161\5\"\22\2\u015f\u0161\5 \21\2\u0160\u015e\3\2\2\2\u0160"+ + "\u015f\3\2\2\2\u0160\u0161\3\2\2\2\u0161/\3\2\2\2\u0162\u016b\7\t\2\2"+ + "\u0163\u0168\5\30\r\2\u0164\u0165\7\f\2\2\u0165\u0167\5\30\r\2\u0166\u0164"+ + "\3\2\2\2\u0167\u016a\3\2\2\2\u0168\u0166\3\2\2\2\u0168\u0169\3\2\2\2\u0169"+ + "\u016c\3\2\2\2\u016a\u0168\3\2\2\2\u016b\u0163\3\2\2\2\u016b\u016c\3\2"+ + "\2\2\u016c\u016d\3\2\2\2\u016d\u016e\7\n\2\2\u016e\61\3\2\2\2\u016f\u0170"+ + "\t\13\2\2\u0170\63\3\2\2\2-\67BJSX\\`eimqv}\u0082\u0086\u0088\u008e\u0093"+ + 
"\u0097\u009e\u00a8\u00b0\u00b6\u00c0\u00de\u0104\u0106\u010f\u0118\u011d"+ + "\u0128\u012f\u0134\u013d\u0142\u0147\u014e\u0156\u0159\u015b\u0160\u0168"+ + "\u016b"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java index d731b57676b..7997b57ae6b 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserBaseVisitor.java @@ -116,6 +116,13 @@ class PlanAParserBaseVisitor extends AbstractParseTreeVisitor implements P * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitEmpty(PlanAParser.EmptyContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitEmptyscope(PlanAParser.EmptyscopeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -151,6 +158,13 @@ class PlanAParserBaseVisitor extends AbstractParseTreeVisitor implements P * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitDeclvar(PlanAParser.DeclvarContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitTrap(PlanAParser.TrapContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java index 7470f3b6ad5..326a62555e7 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAParserVisitor.java @@ -113,6 +113,12 @@ interface PlanAParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitEmpty(PlanAParser.EmptyContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#emptyscope}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEmptyscope(PlanAParser.EmptyscopeContext ctx); /** * Visit a parse tree produced by {@link PlanAParser#initializer}. * @param ctx the parse tree @@ -143,6 +149,12 @@ interface PlanAParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitDeclvar(PlanAParser.DeclvarContext ctx); + /** + * Visit a parse tree produced by {@link PlanAParser#trap}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTrap(PlanAParser.TrapContext ctx); /** * Visit a parse tree produced by the {@code comp} * labeled alternative in {@link PlanAParser#expression}. 
diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java index c893cd38324..d87c9a9e1c4 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAPlugin.java @@ -34,7 +34,7 @@ public final class PlanAPlugin extends Plugin { return "Plan A scripting language for Elasticsearch"; } - public void onModule(ScriptModule module) { + public void onModule(final ScriptModule module) { module.addScriptEngine(PlanAScriptEngineService.class); } } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java index 69736f311e6..858bf21021c 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/PlanAScriptEngineService.java @@ -40,117 +40,199 @@ import java.security.ProtectionDomain; import java.util.HashMap; import java.util.Map; +/** + * Implementation of a ScriptEngine for the Plan A language. + */ public class PlanAScriptEngineService extends AbstractComponent implements ScriptEngineService { + /** + * Standard name of the Plan A language. + */ public static final String NAME = "plan-a"; - // default settings, used unless otherwise specified + + /** + * Default compiler settings to be used. + */ private static final CompilerSettings DEFAULT_COMPILER_SETTINGS = new CompilerSettings(); - public static final String NUMERIC_OVERFLOW = "numeric_overflow"; + /** + * Permissions context used during compilation. + */ + private static final AccessControlContext COMPILATION_CONTEXT; - // TODO: how should custom definitions be specified? + /** + * Setup the allowed permissions. 
+ */ + static { + final Permissions none = new Permissions(); + none.setReadOnly(); + COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { + new ProtectionDomain(null, none) + }); + } + + /** + * Used only for testing. + */ private Definition definition = null; + /** + * Used only for testing. + */ + void setDefinition(final Definition definition) { + this.definition = definition; + } + + /** + * Constructor. + * @param settings The settings to initialize the engine with. + */ @Inject - public PlanAScriptEngineService(Settings settings) { + public PlanAScriptEngineService(final Settings settings) { super(settings); } - public void setDefinition(final Definition definition) { - this.definition = new Definition(definition); - } - + /** + * Get the type name(s) for the language. + * @return Always contains only the single name of the language. + */ @Override public String[] types() { return new String[] { NAME }; } + /** + * Get the extension(s) for the language. + * @return Always contains only the single extension of the language. + */ @Override public String[] extensions() { return new String[] { NAME }; } + /** + * Whether or not the engine is secure. + * @return Always true as the engine should be secure at runtime. + */ @Override public boolean sandboxed() { return true; } - // context used during compilation - private static final AccessControlContext COMPILATION_CONTEXT; - static { - Permissions none = new Permissions(); - none.setReadOnly(); - COMPILATION_CONTEXT = new AccessControlContext(new ProtectionDomain[] { - new ProtectionDomain(null, none) - }); - } - + /** + * Compiles a Plan A script with the specified parameters. + * @param script The code to be compiled. + * @param params The params used to modify the compiler settings on a per script basis. + * @return Compiled script object represented by an {@link Executable}. 
+ */ @Override - public Object compile(String script, Map params) { + public Object compile(final String script, final Map params) { final CompilerSettings compilerSettings; + if (params.isEmpty()) { + // Use the default settings. compilerSettings = DEFAULT_COMPILER_SETTINGS; } else { - // custom settings + // Use custom settings specified by params. compilerSettings = new CompilerSettings(); - Map clone = new HashMap<>(params); - String value = clone.remove(NUMERIC_OVERFLOW); + Map copy = new HashMap<>(params); + String value = copy.remove(CompilerSettings.NUMERIC_OVERFLOW); + if (value != null) { - // TODO: can we get a real boolean parser in here? compilerSettings.setNumericOverflow(Boolean.parseBoolean(value)); } - if (!clone.isEmpty()) { - throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + clone); + + value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER); + + if (value != null) { + compilerSettings.setMaxLoopCounter(Integer.parseInt(value)); + } + + if (!copy.isEmpty()) { + throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy); } } - // check we ourselves are not being called by unprivileged code - SecurityManager sm = System.getSecurityManager(); + + // Check we ourselves are not being called by unprivileged code. + final SecurityManager sm = System.getSecurityManager(); + if (sm != null) { sm.checkPermission(new SpecialPermission()); } - // create our loader (which loads compiled code with no permissions) - Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction() { + + // Create our loader (which loads compiled code with no permissions). + final Compiler.Loader loader = AccessController.doPrivileged(new PrivilegedAction() { @Override public Compiler.Loader run() { return new Compiler.Loader(getClass().getClassLoader()); } }); - // drop all permissions to actually compile the code itself + + // Drop all permissions to actually compile the code itself. 
return AccessController.doPrivileged(new PrivilegedAction() { @Override public Executable run() { - return Compiler.compile(loader, "something", script, definition, compilerSettings); + return Compiler.compile(loader, "unknown", script, definition, compilerSettings); } }, COMPILATION_CONTEXT); } + /** + * Retrieve an {@link ExecutableScript} for later use. + * @param compiledScript A previously compiled script. + * @param vars The variables to be used in the script. + * @return An {@link ExecutableScript} with the currently specified variables. + */ @Override - public ExecutableScript executable(CompiledScript compiledScript, Map vars) { - return new ScriptImpl((Executable) compiledScript.compiled(), vars, null); + public ExecutableScript executable(final CompiledScript compiledScript, final Map vars) { + return new ScriptImpl((Executable)compiledScript.compiled(), vars, null); } + /** + * Retrieve a {@link SearchScript} for later use. + * @param compiledScript A previously compiled script. + * @param lookup The object that ultimately allows access to search fields. + * @param vars The variables to be used in the script. + * @return An {@link SearchScript} with the currently specified variables. + */ @Override - public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map vars) { + public SearchScript search(final CompiledScript compiledScript, final SearchLookup lookup, final Map vars) { return new SearchScript() { + /** + * Get the search script that will have access to search field values. + * @param context The LeafReaderContext to be used. + * @return A script that will have the search fields from the current context available for use. 
+ */ @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - return new ScriptImpl((Executable) compiledScript.compiled(), vars, lookup.getLeafSearchLookup(context)); + public LeafSearchScript getLeafSearchScript(final LeafReaderContext context) throws IOException { + return new ScriptImpl((Executable)compiledScript.compiled(), vars, lookup.getLeafSearchLookup(context)); } + /** + * Whether or not the score is needed. + * @return Always true as it's assumed score is needed. + */ @Override public boolean needsScores() { - return true; // TODO: maybe even do these different and more like expressions. + return true; } }; } + /** + * Action taken when a script is removed from the cache. + * @param script The removed script. + */ @Override - public void scriptRemoved(CompiledScript script) { - // nothing to do + public void scriptRemoved(final CompiledScript script) { + // Nothing to do. } + /** + * Action taken when the engine is closed. + */ @Override public void close() throws IOException { - // nothing to do + // Nothing to do. 
} } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java index 3910cdc96f7..9ce00118c58 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/ScriptImpl.java @@ -19,76 +19,127 @@ package org.elasticsearch.plan.a; +import java.util.HashMap; +import java.util.Map; + import org.apache.lucene.search.Scorer; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.search.lookup.LeafSearchLookup; -import java.util.HashMap; -import java.util.Map; - +/** + * ScriptImpl can be used as either an {@link ExecutableScript} or a {@link LeafSearchScript} + * to run a previously compiled Plan A script. + */ final class ScriptImpl implements ExecutableScript, LeafSearchScript { - final Executable executable; - final Map variables; - final LeafSearchLookup lookup; - - ScriptImpl(Executable executable, Map vars, LeafSearchLookup lookup) { + /** + * The Plan A Executable script that can be run. + */ + private final Executable executable; + + /** + * A map that can be used to access input parameters at run-time. + */ + private final Map variables; + + /** + * The lookup is used to access search field values at run-time. + */ + private final LeafSearchLookup lookup; + + /** + * Creates a ScriptImpl for the a previously compiled Plan A script. + * @param executable The previously compiled Plan A script. + * @param vars The initial variables to run the script with. + * @param lookup The lookup to allow search fields to be available if this is run as a search script. 
+ */ + ScriptImpl(final Executable executable, final Map vars, final LeafSearchLookup lookup) { this.executable = executable; this.lookup = lookup; this.variables = new HashMap<>(); + if (vars != null) { variables.putAll(vars); } + if (lookup != null) { variables.putAll(lookup.asMap()); } } - + + /** + * Set a variable for the script to be run against. + * @param name The variable name. + * @param value The variable value. + */ @Override - public void setNextVar(String name, Object value) { + public void setNextVar(final String name, final Object value) { variables.put(name, value); } - + + /** + * Run the script. + * @return The script result. + */ @Override public Object run() { return executable.execute(variables); } - - @Override - public float runAsFloat() { - return ((Number) run()).floatValue(); - } - - @Override - public long runAsLong() { - return ((Number) run()).longValue(); - } + /** + * Run the script. + * @return The script result as a double. + */ @Override public double runAsDouble() { - return ((Number) run()).doubleValue(); - } - - @Override - public Object unwrap(Object value) { - return value; + return ((Number)run()).doubleValue(); } + /** + * Run the script. + * @return The script result as a float. + */ @Override - public void setScorer(Scorer scorer) { - variables.put("_score", new ScoreAccessor(scorer)); + public float runAsFloat() { + return ((Number)run()).floatValue(); } + /** + * Run the script. + * @return The script result as a long. + */ @Override - public void setDocument(int doc) { + public long runAsLong() { + return ((Number)run()).longValue(); + } + + /** + * Sets the scorer to be accessible within a script. + * @param scorer The scorer used for a search. + */ + @Override + public void setScorer(final Scorer scorer) { + variables.put("#score", new ScoreAccessor(scorer)); + } + + /** + * Sets the current document. + * @param doc The current document. 
+ */ + @Override + public void setDocument(final int doc) { if (lookup != null) { lookup.setDocument(doc); } } + /** + * Sets the current source. + * @param source The current source. + */ @Override - public void setSource(Map source) { + public void setSource(final Map source) { if (lookup != null) { lookup.source().setSource(source); } diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java index 3bb5ae463e7..f132d1edf2f 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Utility.java @@ -1,5 +1,3 @@ -package org.elasticsearch.plan.a; - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -19,6 +17,8 @@ package org.elasticsearch.plan.a; * under the License. */ +package org.elasticsearch.plan.a; + public class Utility { public static boolean NumberToboolean(final Number value) { return value.longValue() != 0; @@ -157,7 +157,7 @@ public class Utility { } public static Character byteToCharacter(final byte value) { - return (char)(byte)value; + return (char)value; } public static Integer byteToInteger(final byte value) { @@ -193,7 +193,7 @@ public class Utility { } public static Character shortToCharacter(final short value) { - return (char)(short)value; + return (char)value; } public static Integer shortToInteger(final short value) { @@ -211,11 +211,11 @@ public class Utility { public static Double shortToDouble(final short value) { return (double)value; } - + public static boolean ShortToboolean(final Short value) { return value != 0; } - + public static char ShortTochar(final Short value) { return (char)value.shortValue(); } @@ -261,19 +261,19 @@ public class Utility { } public static int CharacterToint(final Character value) { - return (int)value; + return value; } public static long 
CharacterTolong(final Character value) { - return (long)value; + return value; } public static float CharacterTofloat(final Character value) { - return (float)value; + return value; } public static double CharacterTodouble(final Character value) { - return (double)value; + return value; } public static Boolean CharacterToBoolean(final Character value) { @@ -317,7 +317,7 @@ public class Utility { } public static Character intToCharacter(final int value) { - return (char)(int)value; + return (char)value; } public static Long intToLong(final int value) { @@ -331,7 +331,7 @@ public class Utility { public static Double intToDouble(final int value) { return (double)value; } - + public static boolean IntegerToboolean(final Integer value) { return value != 0; } @@ -353,7 +353,7 @@ public class Utility { } public static Character longToCharacter(final long value) { - return (char)(long)value; + return (char)value; } public static Integer longToInteger(final long value) { @@ -367,7 +367,7 @@ public class Utility { public static Double longToDouble(final long value) { return (double)value; } - + public static boolean LongToboolean(final Long value) { return value != 0; } @@ -389,7 +389,7 @@ public class Utility { } public static Character floatToCharacter(final float value) { - return (char)(float)value; + return (char)value; } public static Integer floatToInteger(final float value) { @@ -403,11 +403,11 @@ public class Utility { public static Double floatToDouble(final float value) { return (double)value; } - + public static boolean FloatToboolean(final Float value) { return value != 0; } - + public static char FloatTochar(final Float value) { return (char)value.floatValue(); } @@ -425,7 +425,7 @@ public class Utility { } public static Character doubleToCharacter(final double value) { - return (char)(double)value; + return (char)value; } public static Integer doubleToInteger(final double value) { @@ -435,23 +435,23 @@ public class Utility { public static Long 
doubleToLong(final double value) { return (long)value; } - + public static Float doubleToFloat(final double value) { return (float)value; } - + public static boolean DoubleToboolean(final Double value) { return value != 0; } - + public static char DoubleTochar(final Double value) { return (char)value.doubleValue(); } - + // although divide by zero is guaranteed, the special overflow case is not caught. // its not needed for remainder because it is not possible there. // see https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.17.2 - + /** * Integer divide without overflow * @throws ArithmeticException on overflow or divide-by-zero @@ -462,7 +462,7 @@ public class Utility { } return x / y; } - + /** * Long divide without overflow * @throws ArithmeticException on overflow or divide-by-zero @@ -667,7 +667,7 @@ public class Utility { } return z; } - + /** * Checks for NaN, result is NaN but operands are finite * @throws ArithmeticException if overflow occurred @@ -680,7 +680,7 @@ public class Utility { } return z; } - + /** * Checks for NaN, result is infinite but operands are finite * @throws ArithmeticException if overflow occurred @@ -693,7 +693,7 @@ public class Utility { } return z; } - + /** * Checks for NaN, result is NaN but operands are finite * @throws ArithmeticException if overflow occurred @@ -706,7 +706,7 @@ public class Utility { } return z; } - + /** * Adds two floats but throws {@code ArithmeticException} * if the result overflows. @@ -714,7 +714,7 @@ public class Utility { public static float addWithoutOverflow(float x, float y) { return checkInfFloat(x, y, x + y); } - + /** * Adds two doubles but throws {@code ArithmeticException} * if the result overflows. @@ -722,7 +722,7 @@ public class Utility { public static double addWithoutOverflow(double x, double y) { return checkInfDouble(x, y, x + y); } - + /** * Subtracts two floats but throws {@code ArithmeticException} * if the result overflows. 
@@ -730,7 +730,7 @@ public class Utility { public static float subtractWithoutOverflow(float x, float y) { return checkInfFloat(x, y, x - y); } - + /** * Subtracts two doubles but throws {@code ArithmeticException} * if the result overflows. @@ -738,7 +738,7 @@ public class Utility { public static double subtractWithoutOverflow(double x, double y) { return checkInfDouble(x, y , x - y); } - + /** * Multiplies two floats but throws {@code ArithmeticException} * if the result overflows. @@ -746,7 +746,7 @@ public class Utility { public static float multiplyWithoutOverflow(float x, float y) { return checkInfFloat(x, y, x * y); } - + /** * Multiplies two doubles but throws {@code ArithmeticException} * if the result overflows. @@ -754,7 +754,7 @@ public class Utility { public static double multiplyWithoutOverflow(double x, double y) { return checkInfDouble(x, y, x * y); } - + /** * Divides two floats but throws {@code ArithmeticException} * if the result overflows, or would create NaN from finite @@ -763,7 +763,7 @@ public class Utility { public static float divideWithoutOverflow(float x, float y) { return checkNaNFloat(x, y, checkInfFloat(x, y, x / y)); } - + /** * Divides two doubles but throws {@code ArithmeticException} * if the result overflows, or would create NaN from finite @@ -772,7 +772,7 @@ public class Utility { public static double divideWithoutOverflow(double x, double y) { return checkNaNDouble(x, y, checkInfDouble(x, y, x / y)); } - + /** * Takes remainder two floats but throws {@code ArithmeticException} * if the result would create NaN from finite inputs ({@code y == 0}) @@ -780,7 +780,7 @@ public class Utility { public static float remainderWithoutOverflow(float x, float y) { return checkNaNFloat(x, y, x % y); } - + /** * Divides two doubles but throws {@code ArithmeticException} * if the result would create NaN from finite inputs ({@code y == 0}) diff --git a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java 
b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java index 4f3361576c4..34b5b535afa 100644 --- a/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java +++ b/plugins/lang-plan-a/src/main/java/org/elasticsearch/plan/a/Writer.java @@ -21,6 +21,7 @@ package org.elasticsearch.plan.a; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.script.ScoreAccessor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -34,11 +35,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.elasticsearch.plan.a.Adapter.ExpressionMetadata; -import static org.elasticsearch.plan.a.Adapter.ExtNodeMetadata; -import static org.elasticsearch.plan.a.Adapter.ExternalMetadata; -import static org.elasticsearch.plan.a.Adapter.StatementMetadata; -import static org.elasticsearch.plan.a.Adapter.error; import static org.elasticsearch.plan.a.Definition.Cast; import static org.elasticsearch.plan.a.Definition.Constructor; import static org.elasticsearch.plan.a.Definition.Field; @@ -46,6 +42,11 @@ import static org.elasticsearch.plan.a.Definition.Method; import static org.elasticsearch.plan.a.Definition.Sort; import static org.elasticsearch.plan.a.Definition.Transform; import static org.elasticsearch.plan.a.Definition.Type; +import static org.elasticsearch.plan.a.Metadata.ExpressionMetadata; +import static org.elasticsearch.plan.a.Metadata.ExtNodeMetadata; +import static org.elasticsearch.plan.a.Metadata.ExternalMetadata; +import static org.elasticsearch.plan.a.Metadata.StatementMetadata; +import static org.elasticsearch.plan.a.Metadata.error; import static org.elasticsearch.plan.a.PlanAParser.ADD; import static org.elasticsearch.plan.a.PlanAParser.AfterthoughtContext; import static org.elasticsearch.plan.a.PlanAParser.ArgumentsContext; @@ -69,6 +70,7 @@ import static 
org.elasticsearch.plan.a.PlanAParser.DecltypeContext; import static org.elasticsearch.plan.a.PlanAParser.DeclvarContext; import static org.elasticsearch.plan.a.PlanAParser.DoContext; import static org.elasticsearch.plan.a.PlanAParser.EmptyContext; +import static org.elasticsearch.plan.a.PlanAParser.EmptyscopeContext; import static org.elasticsearch.plan.a.PlanAParser.ExprContext; import static org.elasticsearch.plan.a.PlanAParser.ExpressionContext; import static org.elasticsearch.plan.a.PlanAParser.ExtbraceContext; @@ -103,7 +105,10 @@ import static org.elasticsearch.plan.a.PlanAParser.SUB; import static org.elasticsearch.plan.a.PlanAParser.SingleContext; import static org.elasticsearch.plan.a.PlanAParser.SourceContext; import static org.elasticsearch.plan.a.PlanAParser.StatementContext; +import static org.elasticsearch.plan.a.PlanAParser.ThrowContext; +import static org.elasticsearch.plan.a.PlanAParser.TrapContext; import static org.elasticsearch.plan.a.PlanAParser.TrueContext; +import static org.elasticsearch.plan.a.PlanAParser.TryContext; import static org.elasticsearch.plan.a.PlanAParser.USH; import static org.elasticsearch.plan.a.PlanAParser.UnaryContext; import static org.elasticsearch.plan.a.PlanAParser.WhileContext; @@ -112,18 +117,13 @@ class Writer extends PlanAParserBaseVisitor { private static class Branch { final ParserRuleContext source; - Label begin; - Label end; - Label tru; - Label fals; + Label begin = null; + Label end = null; + Label tru = null; + Label fals = null; private Branch(final ParserRuleContext source) { this.source = source; - - begin = null; - end = null; - tru = null; - fals = null; } } @@ -131,200 +131,206 @@ class Writer extends PlanAParserBaseVisitor { final static String CLASS_NAME = BASE_CLASS_NAME + "$CompiledPlanAExecutable"; private final static org.objectweb.asm.Type BASE_CLASS_TYPE = org.objectweb.asm.Type.getType(Executable.class); private final static org.objectweb.asm.Type CLASS_TYPE = - 
org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); + org.objectweb.asm.Type.getType("L" + CLASS_NAME.replace(".", "/") + ";"); + + private final static org.objectweb.asm.Type PLAN_A_ERROR_TYPE = org.objectweb.asm.Type.getType(PlanAError.class); private final static org.objectweb.asm.commons.Method CONSTRUCTOR = org.objectweb.asm.commons.Method.getMethod( - "void (org.elasticsearch.plan.a.Definition, java.lang.String, java.lang.String)"); + "void (org.elasticsearch.plan.a.Definition, java.lang.String, java.lang.String)"); private final static org.objectweb.asm.commons.Method EXECUTE = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object execute(java.util.Map)"); + "java.lang.Object execute(java.util.Map)"); private final static String SIGNATURE = "(Ljava/util/Map;)Ljava/lang/Object;"; private final static org.objectweb.asm.Type DEFINITION_TYPE = org.objectweb.asm.Type.getType(Definition.class); + private final static org.objectweb.asm.Type MAP_TYPE = org.objectweb.asm.Type.getType(Map.class); + private final static org.objectweb.asm.commons.Method MAP_GET = + org.objectweb.asm.commons.Method.getMethod("Object get(Object)"); + + private final static org.objectweb.asm.Type SCORE_ACCESSOR_TYPE = org.objectweb.asm.Type.getType(ScoreAccessor.class); + private final static org.objectweb.asm.commons.Method SCORE_ACCESSOR_FLOAT = + org.objectweb.asm.commons.Method.getMethod("float floatValue()"); + private final static org.objectweb.asm.commons.Method DEF_METHOD_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object methodCall(java.lang.Object, java.lang.String, " + + "java.lang.Object methodCall(java.lang.Object, java.lang.String, " + "org.elasticsearch.plan.a.Definition, java.lang.Object[], boolean[])"); private final static org.objectweb.asm.commons.Method DEF_ARRAY_STORE = org.objectweb.asm.commons.Method.getMethod( - "void arrayStore(java.lang.Object, java.lang.Object, java.lang.Object, " + + "void 
arrayStore(java.lang.Object, java.lang.Object, java.lang.Object, " + "org.elasticsearch.plan.a.Definition, boolean, boolean)"); private final static org.objectweb.asm.commons.Method DEF_ARRAY_LOAD = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object arrayLoad(java.lang.Object, java.lang.Object, " + + "java.lang.Object arrayLoad(java.lang.Object, java.lang.Object, " + "org.elasticsearch.plan.a.Definition, boolean)"); private final static org.objectweb.asm.commons.Method DEF_FIELD_STORE = org.objectweb.asm.commons.Method.getMethod( - "void fieldStore(java.lang.Object, java.lang.Object, java.lang.String, " + + "void fieldStore(java.lang.Object, java.lang.Object, java.lang.String, " + "org.elasticsearch.plan.a.Definition, boolean)"); private final static org.objectweb.asm.commons.Method DEF_FIELD_LOAD = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.plan.a.Definition)"); + "java.lang.Object fieldLoad(java.lang.Object, java.lang.String, org.elasticsearch.plan.a.Definition)"); private final static org.objectweb.asm.commons.Method DEF_NOT_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object not(java.lang.Object)"); + "java.lang.Object not(java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_NEG_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object neg(java.lang.Object)"); + "java.lang.Object neg(java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_MUL_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object mul(java.lang.Object, java.lang.Object)"); + "java.lang.Object mul(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_DIV_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object div(java.lang.Object, java.lang.Object)"); + "java.lang.Object div(java.lang.Object, java.lang.Object)"); private final static 
org.objectweb.asm.commons.Method DEF_REM_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object rem(java.lang.Object, java.lang.Object)"); + "java.lang.Object rem(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_ADD_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object add(java.lang.Object, java.lang.Object)"); + "java.lang.Object add(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_SUB_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object sub(java.lang.Object, java.lang.Object)"); + "java.lang.Object sub(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_LSH_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object lsh(java.lang.Object, java.lang.Object)"); + "java.lang.Object lsh(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_RSH_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object rsh(java.lang.Object, java.lang.Object)"); + "java.lang.Object rsh(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_USH_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object ush(java.lang.Object, java.lang.Object)"); + "java.lang.Object ush(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_AND_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object and(java.lang.Object, java.lang.Object)"); + "java.lang.Object and(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_XOR_CALL = org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object xor(java.lang.Object, java.lang.Object)"); + "java.lang.Object xor(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_OR_CALL = 
org.objectweb.asm.commons.Method.getMethod( - "java.lang.Object or(java.lang.Object, java.lang.Object)"); + "java.lang.Object or(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_EQ_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean eq(java.lang.Object, java.lang.Object)"); + "boolean eq(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_LT_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean lt(java.lang.Object, java.lang.Object)"); + "boolean lt(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_LTE_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean lte(java.lang.Object, java.lang.Object)"); + "boolean lte(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_GT_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean gt(java.lang.Object, java.lang.Object)"); + "boolean gt(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method DEF_GTE_CALL = org.objectweb.asm.commons.Method.getMethod( - "boolean gte(java.lang.Object, java.lang.Object)"); + "boolean gte(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.Type STRINGBUILDER_TYPE = org.objectweb.asm.Type.getType(StringBuilder.class); private final static org.objectweb.asm.commons.Method STRINGBUILDER_CONSTRUCTOR = - org.objectweb.asm.commons.Method.getMethod("void ()"); + org.objectweb.asm.commons.Method.getMethod("void ()"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_BOOLEAN = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(boolean)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(boolean)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_CHAR = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder 
append(char)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(char)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_INT = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(int)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(int)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_LONG = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(long)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(long)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_FLOAT = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(float)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(float)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(double)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(double)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_STRING = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.String)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.String)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_APPEND_OBJECT = - org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.Object)"); + org.objectweb.asm.commons.Method.getMethod("java.lang.StringBuilder append(java.lang.Object)"); private final static org.objectweb.asm.commons.Method STRINGBUILDER_TOSTRING = - org.objectweb.asm.commons.Method.getMethod("java.lang.String toString()"); + org.objectweb.asm.commons.Method.getMethod("java.lang.String toString()"); private final static org.objectweb.asm.commons.Method TOINTEXACT_LONG = - 
org.objectweb.asm.commons.Method.getMethod("int toIntExact(long)"); + org.objectweb.asm.commons.Method.getMethod("int toIntExact(long)"); private final static org.objectweb.asm.commons.Method NEGATEEXACT_INT = - org.objectweb.asm.commons.Method.getMethod("int negateExact(int)"); + org.objectweb.asm.commons.Method.getMethod("int negateExact(int)"); private final static org.objectweb.asm.commons.Method NEGATEEXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("long negateExact(long)"); + org.objectweb.asm.commons.Method.getMethod("long negateExact(long)"); private final static org.objectweb.asm.commons.Method MULEXACT_INT = - org.objectweb.asm.commons.Method.getMethod("int multiplyExact(int, int)"); + org.objectweb.asm.commons.Method.getMethod("int multiplyExact(int, int)"); private final static org.objectweb.asm.commons.Method MULEXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("long multiplyExact(long, long)"); + org.objectweb.asm.commons.Method.getMethod("long multiplyExact(long, long)"); private final static org.objectweb.asm.commons.Method ADDEXACT_INT = - org.objectweb.asm.commons.Method.getMethod("int addExact(int, int)"); + org.objectweb.asm.commons.Method.getMethod("int addExact(int, int)"); private final static org.objectweb.asm.commons.Method ADDEXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("long addExact(long, long)"); + org.objectweb.asm.commons.Method.getMethod("long addExact(long, long)"); private final static org.objectweb.asm.commons.Method SUBEXACT_INT = - org.objectweb.asm.commons.Method.getMethod("int subtractExact(int, int)"); + org.objectweb.asm.commons.Method.getMethod("int subtractExact(int, int)"); private final static org.objectweb.asm.commons.Method SUBEXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("long subtractExact(long, long)"); + org.objectweb.asm.commons.Method.getMethod("long subtractExact(long, long)"); private final static org.objectweb.asm.commons.Method CHECKEQUALS = - 
org.objectweb.asm.commons.Method.getMethod("boolean checkEquals(java.lang.Object, java.lang.Object)"); + org.objectweb.asm.commons.Method.getMethod("boolean checkEquals(java.lang.Object, java.lang.Object)"); private final static org.objectweb.asm.commons.Method TOBYTEEXACT_INT = - org.objectweb.asm.commons.Method.getMethod("byte toByteExact(int)"); + org.objectweb.asm.commons.Method.getMethod("byte toByteExact(int)"); private final static org.objectweb.asm.commons.Method TOBYTEEXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("byte toByteExact(long)"); + org.objectweb.asm.commons.Method.getMethod("byte toByteExact(long)"); private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(float)"); + org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(float)"); private final static org.objectweb.asm.commons.Method TOBYTEWOOVERFLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(double)"); + org.objectweb.asm.commons.Method.getMethod("byte toByteWithoutOverflow(double)"); private final static org.objectweb.asm.commons.Method TOSHORTEXACT_INT = - org.objectweb.asm.commons.Method.getMethod("short toShortExact(int)"); + org.objectweb.asm.commons.Method.getMethod("short toShortExact(int)"); private final static org.objectweb.asm.commons.Method TOSHORTEXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("short toShortExact(long)"); + org.objectweb.asm.commons.Method.getMethod("short toShortExact(long)"); private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(float)"); + org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(float)"); private final static org.objectweb.asm.commons.Method TOSHORTWOOVERFLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(double)"); + 
org.objectweb.asm.commons.Method.getMethod("short toShortWithoutOverflow(double)"); private final static org.objectweb.asm.commons.Method TOCHAREXACT_INT = - org.objectweb.asm.commons.Method.getMethod("char toCharExact(int)"); + org.objectweb.asm.commons.Method.getMethod("char toCharExact(int)"); private final static org.objectweb.asm.commons.Method TOCHAREXACT_LONG = - org.objectweb.asm.commons.Method.getMethod("char toCharExact(long)"); + org.objectweb.asm.commons.Method.getMethod("char toCharExact(long)"); private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(float)"); + org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(float)"); private final static org.objectweb.asm.commons.Method TOCHARWOOVERFLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(double)"); + org.objectweb.asm.commons.Method.getMethod("char toCharWithoutOverflow(double)"); private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(float)"); + org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(float)"); private final static org.objectweb.asm.commons.Method TOINTWOOVERFLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(double)"); + org.objectweb.asm.commons.Method.getMethod("int toIntWithoutOverflow(double)"); private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(float)"); + org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(float)"); private final static org.objectweb.asm.commons.Method TOLONGWOOVERFLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("long toLongExactWithoutOverflow(double)"); + org.objectweb.asm.commons.Method.getMethod("long 
toLongExactWithoutOverflow(double)"); private final static org.objectweb.asm.commons.Method TOFLOATWOOVERFLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("float toFloatWithoutOverflow(double)"); + org.objectweb.asm.commons.Method.getMethod("float toFloatWithoutOverflow(double)"); private final static org.objectweb.asm.commons.Method MULWOOVERLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("float multiplyWithoutOverflow(float, float)"); + org.objectweb.asm.commons.Method.getMethod("float multiplyWithoutOverflow(float, float)"); private final static org.objectweb.asm.commons.Method MULWOOVERLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("double multiplyWithoutOverflow(double, double)"); + org.objectweb.asm.commons.Method.getMethod("double multiplyWithoutOverflow(double, double)"); private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_INT = - org.objectweb.asm.commons.Method.getMethod("int divideWithoutOverflow(int, int)"); + org.objectweb.asm.commons.Method.getMethod("int divideWithoutOverflow(int, int)"); private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_LONG = - org.objectweb.asm.commons.Method.getMethod("long divideWithoutOverflow(long, long)"); + org.objectweb.asm.commons.Method.getMethod("long divideWithoutOverflow(long, long)"); private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("float divideWithoutOverflow(float, float)"); + org.objectweb.asm.commons.Method.getMethod("float divideWithoutOverflow(float, float)"); private final static org.objectweb.asm.commons.Method DIVWOOVERLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("double divideWithoutOverflow(double, double)"); + org.objectweb.asm.commons.Method.getMethod("double divideWithoutOverflow(double, double)"); private final static org.objectweb.asm.commons.Method REMWOOVERLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("float remainderWithoutOverflow(float, 
float)"); + org.objectweb.asm.commons.Method.getMethod("float remainderWithoutOverflow(float, float)"); private final static org.objectweb.asm.commons.Method REMWOOVERLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("double remainderWithoutOverflow(double, double)"); + org.objectweb.asm.commons.Method.getMethod("double remainderWithoutOverflow(double, double)"); private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("float addWithoutOverflow(float, float)"); + org.objectweb.asm.commons.Method.getMethod("float addWithoutOverflow(float, float)"); private final static org.objectweb.asm.commons.Method ADDWOOVERLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("double addWithoutOverflow(double, double)"); + org.objectweb.asm.commons.Method.getMethod("double addWithoutOverflow(double, double)"); private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_FLOAT = - org.objectweb.asm.commons.Method.getMethod("float subtractWithoutOverflow(float, float)"); + org.objectweb.asm.commons.Method.getMethod("float subtractWithoutOverflow(float, float)"); private final static org.objectweb.asm.commons.Method SUBWOOVERLOW_DOUBLE = - org.objectweb.asm.commons.Method.getMethod("double subtractWithoutOverflow(double, double)"); + org.objectweb.asm.commons.Method.getMethod("double subtractWithoutOverflow(double, double)"); - static byte[] write(Adapter adapter) { - Writer writer = new Writer(adapter); + static byte[] write(Metadata metadata) { + Writer writer = new Writer(metadata); return writer.getBytes(); } - private final Adapter adapter; + private final Metadata metadata; private final Definition definition; private final ParseTree root; private final String source; private final CompilerSettings settings; - private final Map branches; - private final Deque jumps; - private final Set strings; + private final Map branches = new HashMap<>(); + private final Deque jumps = new ArrayDeque<>(); + 
private final Set strings = new HashSet<>(); private ClassWriter writer; private GeneratorAdapter execute; - private Writer(final Adapter adapter) { - this.adapter = adapter; - definition = adapter.definition; - root = adapter.root; - source = adapter.source; - settings = adapter.settings; - - branches = new HashMap<>(); - jumps = new ArrayDeque<>(); - strings = new HashSet<>(); + private Writer(final Metadata metadata) { + this.metadata = metadata; + definition = metadata.definition; + root = metadata.root; + source = metadata.source; + settings = metadata.settings; writeBegin(); writeConstructor(); @@ -377,19 +383,39 @@ class Writer extends PlanAParserBaseVisitor { private void writeExecute() { final int access = Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC; execute = new GeneratorAdapter(access, EXECUTE, SIGNATURE, null, writer); + + final Label fals = new Label(); + final Label end = new Label(); + execute.visitVarInsn(Opcodes.ALOAD, metadata.inputValueSlot); + execute.push("#score"); + execute.invokeInterface(MAP_TYPE, MAP_GET); + execute.dup(); + execute.ifNull(fals); + execute.checkCast(SCORE_ACCESSOR_TYPE); + execute.invokeVirtual(SCORE_ACCESSOR_TYPE, SCORE_ACCESSOR_FLOAT); + execute.goTo(end); + execute.mark(fals); + execute.pop(); + execute.push(0F); + execute.mark(end); + execute.visitVarInsn(Opcodes.FSTORE, metadata.scoreValueSlot); + + execute.push(settings.getMaxLoopCounter()); + execute.visitVarInsn(Opcodes.ISTORE, metadata.loopCounterSlot); + visit(root); execute.endMethod(); } @Override public Void visitSource(final SourceContext ctx) { - final StatementMetadata sourcesmd = adapter.getStatementMetadata(ctx); + final StatementMetadata sourcesmd = metadata.getStatementMetadata(ctx); for (final StatementContext sctx : ctx.statement()) { visit(sctx); } - if (!sourcesmd.allReturn) { + if (!sourcesmd.methodEscape) { execute.visitInsn(Opcodes.ACONST_NULL); execute.returnValue(); } @@ -408,11 +434,11 @@ class Writer extends PlanAParserBaseVisitor { 
visit(exprctx); final BlockContext blockctx0 = ctx.block(0); - final StatementMetadata blockmd0 = adapter.getStatementMetadata(blockctx0); + final StatementMetadata blockmd0 = metadata.getStatementMetadata(blockctx0); visit(blockctx0); if (els) { - if (!blockmd0.allExit) { + if (!blockmd0.allLast) { execute.goTo(branch.end); } @@ -438,15 +464,20 @@ class Writer extends PlanAParserBaseVisitor { visit(exprctx); final BlockContext blockctx = ctx.block(); - boolean allexit = false; + boolean allLast = false; if (blockctx != null) { - StatementMetadata blocksmd = adapter.getStatementMetadata(blockctx); - allexit = blocksmd.allExit; + final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + allLast = blocksmd.allLast; + writeLoopCounter(blocksmd.count > 0 ? blocksmd.count : 1); visit(blockctx); + } else if (ctx.empty() != null) { + writeLoopCounter(1); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); } - if (!allexit) { + if (!allLast) { execute.goTo(branch.begin); } @@ -460,23 +491,21 @@ class Writer extends PlanAParserBaseVisitor { public Void visitDo(final DoContext ctx) { final ExpressionContext exprctx = ctx.expression(); final Branch branch = markBranch(ctx, exprctx); + Label start = new Label(); branch.begin = new Label(); branch.end = new Label(); branch.fals = branch.end; + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + jumps.push(branch); + execute.mark(start); + visit(blockctx); execute.mark(branch.begin); - - final BlockContext bctx = ctx.block(); - final StatementMetadata blocksmd = adapter.getStatementMetadata(bctx); - visit(bctx); - visit(exprctx); - - if (!blocksmd.allExit) { - execute.goTo(branch.begin); - } - + writeLoopCounter(blocksmd.count > 0 ? 
blocksmd.count : 1); + execute.goTo(start); execute.mark(branch.end); jumps.pop(); @@ -506,12 +535,24 @@ class Writer extends PlanAParserBaseVisitor { } final BlockContext blockctx = ctx.block(); - boolean allexit = false; + boolean allLast = false; if (blockctx != null) { - StatementMetadata blocksmd = adapter.getStatementMetadata(blockctx); - allexit = blocksmd.allExit; + StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + allLast = blocksmd.allLast; + + int count = blocksmd.count > 0 ? blocksmd.count : 1; + + if (atctx != null) { + ++count; + } + + writeLoopCounter(count); visit(blockctx); + } else if (ctx.empty() != null) { + writeLoopCounter(1); + } else { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); } if (atctx != null) { @@ -519,7 +560,7 @@ class Writer extends PlanAParserBaseVisitor { visit(atctx); } - if (atctx != null || !allexit) { + if (atctx != null || !allLast) { execute.goTo(start); } @@ -560,14 +601,56 @@ class Writer extends PlanAParserBaseVisitor { return null; } + @Override + public Void visitTry(final TryContext ctx) { + final TrapContext[] trapctxs = new TrapContext[ctx.trap().size()]; + ctx.trap().toArray(trapctxs); + final Branch branch = markBranch(ctx, trapctxs); + + Label end = new Label(); + branch.begin = new Label(); + branch.end = new Label(); + branch.tru = trapctxs.length > 1 ? 
end : null; + + execute.mark(branch.begin); + + final BlockContext blockctx = ctx.block(); + final StatementMetadata blocksmd = metadata.getStatementMetadata(blockctx); + visit(blockctx); + + if (!blocksmd.allLast) { + execute.goTo(end); + } + + execute.mark(branch.end); + + for (final TrapContext trapctx : trapctxs) { + visit(trapctx); + } + + if (!blocksmd.allLast || trapctxs.length > 1) { + execute.mark(end); + } + + return null; + } + + @Override + public Void visitThrow(final ThrowContext ctx) { + visit(ctx.expression()); + execute.throwException(); + + return null; + } + @Override public Void visitExpr(final ExprContext ctx) { - final StatementMetadata exprsmd = adapter.getStatementMetadata(ctx); + final StatementMetadata exprsmd = metadata.getStatementMetadata(ctx); final ExpressionContext exprctx = ctx.expression(); - final ExpressionMetadata expremd = adapter.getExpressionMetadata(exprctx); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); visit(exprctx); - if (exprsmd.allReturn) { + if (exprsmd.methodEscape) { execute.returnValue(); } else { writePop(expremd.to.type.getSize()); @@ -605,7 +688,9 @@ class Writer extends PlanAParserBaseVisitor { if (declctx != null) { visit(declctx); } else if (exprctx != null) { + final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); visit(exprctx); + writePop(expremd.to.type.getSize()); } else { throw new IllegalStateException(error(ctx) + "Unexpected writer state."); } @@ -615,7 +700,10 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitAfterthought(AfterthoughtContext ctx) { + final ExpressionContext exprctx = ctx.expression(); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(exprctx); visit(ctx.expression()); + writePop(expremd.to.type.getSize()); return null; } @@ -636,7 +724,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitDeclvar(final DeclvarContext ctx) { - final ExpressionMetadata declvaremd = 
adapter.getExpressionMetadata(ctx); + final ExpressionMetadata declvaremd = metadata.getExpressionMetadata(ctx); final org.objectweb.asm.Type type = declvaremd.to.type; final Sort sort = declvaremd.to.sort; final int slot = (int)declvaremd.postConst; @@ -666,6 +754,34 @@ class Writer extends PlanAParserBaseVisitor { return null; } + @Override + public Void visitTrap(final TrapContext ctx) { + final StatementMetadata trapsmd = metadata.getStatementMetadata(ctx); + + final Branch branch = getBranch(ctx); + final Label jump = new Label(); + + final BlockContext blockctx = ctx.block(); + final EmptyscopeContext emptyctx = ctx.emptyscope(); + + execute.mark(jump); + writeLoadStoreVariable(ctx, true, trapsmd.exception, trapsmd.slot); + + if (blockctx != null) { + visit(ctx.block()); + } else if (emptyctx == null) { + throw new IllegalStateException(error(ctx) + "Unexpected writer state."); + } + + execute.visitTryCatchBlock(branch.begin, branch.end, jump, trapsmd.exception.type.getInternalName()); + + if (branch.tru != null && !trapsmd.allLast) { + execute.goTo(branch.tru); + } + + return null; + } + @Override public Void visitPrecedence(final PrecedenceContext ctx) { throw new UnsupportedOperationException(error(ctx) + "Unexpected writer state."); @@ -673,7 +789,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitNumeric(final NumericContext ctx) { - final ExpressionMetadata numericemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata numericemd = metadata.getExpressionMetadata(ctx); final Object postConst = numericemd.postConst; if (postConst == null) { @@ -690,7 +806,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitChar(final CharContext ctx) { - final ExpressionMetadata charemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata charemd = metadata.getExpressionMetadata(ctx); final Object postConst = charemd.postConst; if (postConst == null) { @@ -707,7 +823,7 @@ class Writer extends 
PlanAParserBaseVisitor { @Override public Void visitTrue(final TrueContext ctx) { - final ExpressionMetadata trueemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata trueemd = metadata.getExpressionMetadata(ctx); final Object postConst = trueemd.postConst; final Branch branch = getBranch(ctx); @@ -727,7 +843,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitFalse(final FalseContext ctx) { - final ExpressionMetadata falseemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata falseemd = metadata.getExpressionMetadata(ctx); final Object postConst = falseemd.postConst; final Branch branch = getBranch(ctx); @@ -747,7 +863,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitNull(final NullContext ctx) { - final ExpressionMetadata nullemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata nullemd = metadata.getExpressionMetadata(ctx); execute.visitInsn(Opcodes.ACONST_NULL); checkWriteCast(nullemd); @@ -758,7 +874,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExternal(final ExternalContext ctx) { - final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -769,7 +885,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitPostinc(final PostincContext ctx) { - final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -779,7 +895,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitPreinc(final PreincContext ctx) { - final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); 
checkWriteCast(expremd); checkWriteBranch(ctx); @@ -789,7 +905,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitUnary(final UnaryContext ctx) { - final ExpressionMetadata unaryemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata unaryemd = metadata.getExpressionMetadata(ctx); final Object postConst = unaryemd.postConst; final Object preConst = unaryemd.preConst; final Branch branch = getBranch(ctx); @@ -891,7 +1007,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitCast(final CastContext ctx) { - final ExpressionMetadata castemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata castemd = metadata.getExpressionMetadata(ctx); final Object postConst = castemd.postConst; if (postConst == null) { @@ -908,7 +1024,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitBinary(final BinaryContext ctx) { - final ExpressionMetadata binaryemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata binaryemd = metadata.getExpressionMetadata(ctx); final Object postConst = binaryemd.postConst; final Object preConst = binaryemd.preConst; final Branch branch = getBranch(ctx); @@ -930,7 +1046,7 @@ class Writer extends PlanAParserBaseVisitor { } final ExpressionContext exprctx0 = ctx.expression(0); - final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(exprctx0); + final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); strings.add(exprctx0); visit(exprctx0); @@ -940,7 +1056,7 @@ class Writer extends PlanAParserBaseVisitor { } final ExpressionContext exprctx1 = ctx.expression(1); - final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(exprctx1); + final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); strings.add(exprctx1); visit(exprctx1); @@ -990,7 +1106,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitComp(final CompContext ctx) { - final ExpressionMetadata compemd = 
adapter.getExpressionMetadata(ctx); + final ExpressionMetadata compemd = metadata.getExpressionMetadata(ctx); final Object postConst = compemd.postConst; final Object preConst = compemd.preConst; final Branch branch = getBranch(ctx); @@ -1014,10 +1130,10 @@ class Writer extends PlanAParserBaseVisitor { } } else { final ExpressionContext exprctx0 = ctx.expression(0); - final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(exprctx0); + final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(exprctx0); final ExpressionContext exprctx1 = ctx.expression(1); - final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(exprctx1); + final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(exprctx1); final org.objectweb.asm.Type type = expremd1.to.type; final Sort sort1 = expremd1.to.sort; @@ -1033,9 +1149,9 @@ class Writer extends PlanAParserBaseVisitor { final Label end = new Label(); final boolean eq = (ctx.EQ() != null || ctx.EQR() != null) && (tru || !fals) || - (ctx.NE() != null || ctx.NER() != null) && fals; + (ctx.NE() != null || ctx.NER() != null) && fals; final boolean ne = (ctx.NE() != null || ctx.NER() != null) && (tru || !fals) || - (ctx.EQ() != null || ctx.EQR() != null) && fals; + (ctx.EQ() != null || ctx.EQR() != null) && fals; final boolean lt = ctx.LT() != null && (tru || !fals) || ctx.GTE() != null && fals; final boolean lte = ctx.LTE() != null && (tru || !fals) || ctx.GT() != null && fals; final boolean gt = ctx.GT() != null && (tru || !fals) || ctx.LTE() != null && fals; @@ -1156,7 +1272,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitBool(final BoolContext ctx) { - final ExpressionMetadata boolemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata boolemd = metadata.getExpressionMetadata(ctx); final Object postConst = boolemd.postConst; final Object preConst = boolemd.preConst; final Branch branch = getBranch(ctx); @@ -1255,7 +1371,7 @@ class Writer extends 
PlanAParserBaseVisitor { @Override public Void visitConditional(final ConditionalContext ctx) { - final ExpressionMetadata condemd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata condemd = metadata.getExpressionMetadata(ctx); final Branch branch = getBranch(ctx); final ExpressionContext expr0 = ctx.expression(0); @@ -1286,7 +1402,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitAssignment(final AssignmentContext ctx) { - final ExpressionMetadata expremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(ctx); visit(ctx.extstart()); checkWriteCast(expremd); checkWriteBranch(ctx); @@ -1296,10 +1412,10 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExtstart(ExtstartContext ctx) { - final ExternalMetadata startemd = adapter.getExternalMetadata(ctx); + final ExternalMetadata startemd = metadata.getExternalMetadata(ctx); if (startemd.token == ADD) { - final ExpressionMetadata storeemd = adapter.getExpressionMetadata(startemd.storeExpr); + final ExpressionMetadata storeemd = metadata.getExpressionMetadata(startemd.storeExpr); if (startemd.current.sort == Sort.STRING || storeemd.from.sort == Sort.STRING) { writeNewStrings(); @@ -1372,7 +1488,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExtcast(final ExtcastContext ctx) { - ExtNodeMetadata castenmd = adapter.getExtNodeMetadata(ctx); + ExtNodeMetadata castenmd = metadata.getExtNodeMetadata(ctx); final ExtprecContext precctx = ctx.extprec(); final ExtcastContext castctx = ctx.extcast(); @@ -1404,7 +1520,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitExtbrace(final ExtbraceContext ctx) { - final ExpressionContext exprctx = adapter.updateExpressionTree(ctx.expression()); + final ExpressionContext exprctx = metadata.updateExpressionTree(ctx.expression()); visit(exprctx); writeLoadStoreExternal(ctx); @@ -1508,7 +1624,7 @@ class Writer 
extends PlanAParserBaseVisitor { @Override public Void visitExtstring(ExtstringContext ctx) { - final ExtNodeMetadata stringenmd = adapter.getExtNodeMetadata(ctx); + final ExtNodeMetadata stringenmd = metadata.getExtNodeMetadata(ctx); writeConstant(ctx, stringenmd.target); @@ -1531,7 +1647,7 @@ class Writer extends PlanAParserBaseVisitor { @Override public Void visitIncrement(IncrementContext ctx) { - final ExpressionMetadata incremd = adapter.getExpressionMetadata(ctx); + final ExpressionMetadata incremd = metadata.getExpressionMetadata(ctx); final Object postConst = incremd.postConst; if (postConst == null) { @@ -1546,6 +1662,18 @@ class Writer extends PlanAParserBaseVisitor { return null; } + private void writeLoopCounter(final int count) { + final Label end = new Label(); + + execute.iinc(metadata.loopCounterSlot, -count); + execute.visitVarInsn(Opcodes.ILOAD, metadata.loopCounterSlot); + execute.push(0); + execute.ifICmp(GeneratorAdapter.GT, end); + execute.throwException(PLAN_A_ERROR_TYPE, + "The maximum number of statements that can be executed in a loop has been reached."); + execute.mark(end); + } + private void writeConstant(final ParserRuleContext source, final Object constant) { if (constant instanceof Number) { writeNumeric(source, constant); @@ -1618,12 +1746,12 @@ class Writer extends PlanAParserBaseVisitor { private void writeBinaryInstruction(final ParserRuleContext source, final Type type, final int token) { final Sort sort = type.sort; final boolean exact = !settings.getNumericOverflow() && - ((sort == Sort.INT || sort == Sort.LONG) && + ((sort == Sort.INT || sort == Sort.LONG) && (token == MUL || token == DIV || token == ADD || token == SUB) || (sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)); + (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)); - // if its a 64-bit shift, fixup the last argument to truncate to 32-bits + // if its a 
64-bit shift, fixup the lastSource argument to truncate to 32-bits // note unlike java, this means we still do binary promotion of shifts, // but it keeps things simple -- this check works because we promote shifts. if (sort == Sort.LONG && (token == LSH || token == USH || token == RSH)) { @@ -1683,7 +1811,7 @@ class Writer extends PlanAParserBaseVisitor { } } else { if ((sort == Sort.FLOAT || sort == Sort.DOUBLE) && - (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) { + (token == LSH || token == USH || token == RSH || token == BWAND || token == BWXOR || token == BWOR)) { throw new IllegalStateException(error(source) + "Unexpected writer state."); } @@ -1731,122 +1859,122 @@ class Writer extends PlanAParserBaseVisitor { * @return true if an instruction is written, false otherwise */ private boolean writeExactInstruction(final Sort osort, final Sort psort) { - if (psort == Sort.DOUBLE) { - if (osort == Sort.FLOAT) { - execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - } else if (osort == Sort.FLOAT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); - execute.checkCast(definition.floatobjType.type); - } else if (osort == Sort.LONG) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - } else if (osort == Sort.LONG_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); - execute.checkCast(definition.longobjType.type); - } else if (osort == Sort.INT) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - } else if (osort == Sort.INT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); - execute.checkCast(definition.intobjType.type); - } else if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, 
TOCHARWOOVERFLOW_DOUBLE); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); - execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - } else if (osort == Sort.BYTE_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (psort == Sort.FLOAT) { - if (osort == Sort.LONG) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - } else if (osort == Sort.LONG_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); - execute.checkCast(definition.longobjType.type); - } else if (osort == Sort.INT) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - } else if (osort == Sort.INT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); - execute.checkCast(definition.intobjType.type); - } else if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); - execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); - } else if (osort == Sort.BYTE_OBJ) { - 
execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (psort == Sort.LONG) { - if (osort == Sort.INT) { - execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); - } else if (osort == Sort.INT_OBJ) { - execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); - execute.checkCast(definition.intobjType.type); - } else if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); - execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); - } else if (osort == Sort.BYTE_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } - } else if (psort == Sort.INT) { - if (osort == Sort.CHAR) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); - } else if (osort == Sort.CHAR_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); - execute.checkCast(definition.charobjType.type); - } else if (osort == Sort.SHORT) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); - } else if (osort == Sort.SHORT_OBJ) { - execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); - execute.checkCast(definition.shortobjType.type); - } else if (osort == Sort.BYTE) { - execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); - } else if (osort == Sort.BYTE_OBJ) { - 
execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); - execute.checkCast(definition.byteobjType.type); - } else { - return false; - } + if (psort == Sort.DOUBLE) { + if (osort == Sort.FLOAT) { + execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); + } else if (osort == Sort.FLOAT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOFLOATWOOVERFLOW_DOUBLE); + execute.checkCast(definition.floatobjType.type); + } else if (osort == Sort.LONG) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); + } else if (osort == Sort.LONG_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_DOUBLE); + execute.checkCast(definition.longobjType.type); + } else if (osort == Sort.INT) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_DOUBLE); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_DOUBLE); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_DOUBLE); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_DOUBLE); + execute.checkCast(definition.byteobjType.type); } else { return false; } + } else if (psort == Sort.FLOAT) { + if (osort == Sort.LONG) { + execute.invokeStatic(definition.utilityType.type, 
TOLONGWOOVERFLOW_FLOAT); + } else if (osort == Sort.LONG_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOLONGWOOVERFLOW_FLOAT); + execute.checkCast(definition.longobjType.type); + } else if (osort == Sort.INT) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOINTWOOVERFLOW_FLOAT); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHARWOOVERFLOW_FLOAT); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTWOOVERFLOW_FLOAT); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEWOOVERFLOW_FLOAT); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.LONG) { + if (osort == Sort.INT) { + execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); + } else if (osort == Sort.INT_OBJ) { + execute.invokeStatic(definition.mathType.type, TOINTEXACT_LONG); + execute.checkCast(definition.intobjType.type); + } else if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_LONG); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); + } else 
if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_LONG); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_LONG); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else if (psort == Sort.INT) { + if (osort == Sort.CHAR) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); + } else if (osort == Sort.CHAR_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOCHAREXACT_INT); + execute.checkCast(definition.charobjType.type); + } else if (osort == Sort.SHORT) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); + } else if (osort == Sort.SHORT_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOSHORTEXACT_INT); + execute.checkCast(definition.shortobjType.type); + } else if (osort == Sort.BYTE) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); + } else if (osort == Sort.BYTE_OBJ) { + execute.invokeStatic(definition.utilityType.type, TOBYTEEXACT_INT); + execute.checkCast(definition.byteobjType.type); + } else { + return false; + } + } else { + return false; + } return true; } private void writeLoadStoreExternal(final ParserRuleContext source) { - final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source); - final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceenmd.parent); + final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); final boolean length = "#length".equals(sourceenmd.target); final boolean array = "#brace".equals(sourceenmd.target); @@ -1868,7 +1996,7 @@ class Writer extends PlanAParserBaseVisitor { if (length) { execute.arrayLength(); } else if (sourceenmd.last && 
parentemd.storeExpr != null) { - final ExpressionMetadata expremd = adapter.getExpressionMetadata(parentemd.storeExpr); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); final boolean cat = strings.contains(parentemd.storeExpr); if (cat) { @@ -1934,7 +2062,7 @@ class Writer extends PlanAParserBaseVisitor { boolean exact = false; if (!settings.getNumericOverflow() && expremd.typesafe && sourceenmd.type.sort != Sort.DEF && - (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) { + (token == MUL || token == DIV || token == REM || token == ADD || token == SUB)) { exact = writeExactInstruction(sourceenmd.type.sort, sourceenmd.promote.sort); } @@ -1973,7 +2101,7 @@ class Writer extends PlanAParserBaseVisitor { final boolean store, final boolean variable, final boolean field, final boolean name, final boolean array, final boolean shortcut) { - final ExtNodeMetadata sourceemd = adapter.getExtNodeMetadata(source); + final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); if (variable) { writeLoadStoreVariable(source, store, sourceemd.type, (int)sourceemd.target); @@ -2030,9 +2158,9 @@ class Writer extends PlanAParserBaseVisitor { private void writeLoadStoreField(final ParserRuleContext source, final boolean store, final String name) { if (store) { - final ExtNodeMetadata sourceemd = adapter.getExtNodeMetadata(source); - final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceemd.parent); - final ExpressionMetadata expremd = adapter.getExpressionMetadata(parentemd.storeExpr); + final ExtNodeMetadata sourceemd = metadata.getExtNodeMetadata(source); + final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceemd.parent); + final ExpressionMetadata expremd = metadata.getExpressionMetadata(parentemd.storeExpr); execute.push(name); execute.loadThis(); @@ -2054,12 +2182,12 @@ class Writer extends PlanAParserBaseVisitor { if (type.sort == Sort.DEF) { final ExtbraceContext 
bracectx = (ExtbraceContext)source; - final ExpressionMetadata expremd0 = adapter.getExpressionMetadata(bracectx.expression()); + final ExpressionMetadata expremd0 = metadata.getExpressionMetadata(bracectx.expression()); if (store) { - final ExtNodeMetadata braceenmd = adapter.getExtNodeMetadata(bracectx); - final ExternalMetadata parentemd = adapter.getExternalMetadata(braceenmd.parent); - final ExpressionMetadata expremd1 = adapter.getExpressionMetadata(parentemd.storeExpr); + final ExtNodeMetadata braceenmd = metadata.getExtNodeMetadata(bracectx); + final ExternalMetadata parentemd = metadata.getExternalMetadata(braceenmd.parent); + final ExpressionMetadata expremd1 = metadata.getExpressionMetadata(parentemd.storeExpr); execute.loadThis(); execute.getField(CLASS_TYPE, "definition", DEFINITION_TYPE); @@ -2118,8 +2246,8 @@ class Writer extends PlanAParserBaseVisitor { } private void writeNewExternal(final ExtnewContext source) { - final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source); - final ExternalMetadata parentemd = adapter.getExternalMetadata(sourceenmd.parent); + final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); + final ExternalMetadata parentemd = metadata.getExternalMetadata(sourceenmd.parent); final boolean makearray = "#makearray".equals(sourceenmd.target); final boolean constructor = sourceenmd.target instanceof Constructor; @@ -2155,7 +2283,7 @@ class Writer extends PlanAParserBaseVisitor { } private void writeCallExternal(final ExtcallContext source) { - final ExtNodeMetadata sourceenmd = adapter.getExtNodeMetadata(source); + final ExtNodeMetadata sourceenmd = metadata.getExtNodeMetadata(source); final boolean method = sourceenmd.target instanceof Method; final boolean def = sourceenmd.target instanceof String; @@ -2205,7 +2333,7 @@ class Writer extends PlanAParserBaseVisitor { for (int argument = 0; argument < arguments.size(); ++argument) { execute.dup(); execute.push(argument); - 
execute.push(adapter.getExpressionMetadata(arguments.get(argument)).typesafe); + execute.push(metadata.getExpressionMetadata(arguments.get(argument)).typesafe); execute.arrayStore(definition.booleanType.type); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java new file mode 100644 index 00000000000..d81c4029a79 --- /dev/null +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/BasicAPITests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.plan.a; + +public class BasicAPITests extends ScriptTestCase { + + public void testListIterator() { + assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + + "int total = 0; while (y.hasNext()) total += y.next(); return total;")); + assertEquals(3, exec("List x = new ArrayList(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + assertEquals("abc", exec("List x = new ArrayList(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); + assertEquals(3, exec("def x = new ArrayList(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + + "def total = 0; while (y.hasNext()) total += y.next(); return total;")); + } + + public void testSetIterator() { + assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + + "int total = 0; while (y.hasNext()) total += y.next(); return total;")); + assertEquals(3, exec("Set x = new HashSet(); x.add(2); x.add(3); x.add(-2); Iterator y = x.iterator(); " + + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + assertEquals("abc", exec("Set x = new HashSet(); x.add(\"a\"); x.add(\"b\"); x.add(\"c\"); " + + "Iterator y = x.iterator(); String total = \"\"; while (y.hasNext()) total += y.next(); return total;")); + assertEquals(3, exec("def x = new HashSet(); x.add(2); x.add(3); x.add(-2); def y = x.iterator(); " + + "def total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + } + + public void testMapIterator() { + assertEquals(3, exec("Map x = new HashMap(); x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.keySet().iterator(); " + + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + assertEquals(3, exec("Map x = new HashMap(); 
x.put(2, 2); x.put(3, 3); x.put(-2, -2); Iterator y = x.values().iterator(); " + + "int total = 0; while (y.hasNext()) total += (int)y.next(); return total;")); + } +} diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java index 4603a669df2..2f8966d581f 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowDisabledTests.java @@ -24,14 +24,14 @@ import java.util.Map; /** Tests floating point overflow with numeric overflow disabled */ public class FloatOverflowDisabledTests extends ScriptTestCase { - + /** wire overflow to false for all tests */ @Override public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); } - public void testAssignmentAdditionOverflow() { + public void testAssignmentAdditionOverflow() { // float try { exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;"); @@ -41,7 +41,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double try { exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;"); @@ -52,8 +52,8 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - - public void testAssignmentSubtractionOverflow() { + + public void testAssignmentSubtractionOverflow() { // float try { exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return 
x;"); @@ -63,7 +63,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double try { exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;"); @@ -74,7 +74,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentMultiplicationOverflow() { // float try { @@ -85,7 +85,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = 3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double try { exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;"); @@ -96,7 +96,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentDivisionOverflow() { // float try { @@ -111,7 +111,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = 1.0f; x /= 0.0f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double try { exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;"); @@ -137,7 +137,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testAdditionConst() throws Exception { try { exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;"); @@ -148,7 +148,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testSubtraction() throws Exception { try { 
exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;"); @@ -159,7 +159,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testSubtractionConst() throws Exception { try { exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;"); @@ -170,7 +170,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplication() throws Exception { try { exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;"); @@ -181,7 +181,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplicationConst() throws Exception { try { exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;"); @@ -211,7 +211,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivisionConst() throws Exception { try { exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;"); @@ -230,7 +230,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testDivisionNaN() throws Exception { // float division, constant division, and assignment try { @@ -245,7 +245,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = 0f; x /= 0f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double division, constant division, and assignment try { exec("double x = 0.0; double y = 0.0; return x / y;"); @@ -260,7 +260,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { 
fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} } - + public void testRemainderNaN() throws Exception { // float division, constant division, and assignment try { @@ -275,7 +275,7 @@ public class FloatOverflowDisabledTests extends ScriptTestCase { exec("float x = 1f; x %= 0f; return x;"); fail("didn't hit expected exception"); } catch (ArithmeticException expected) {} - + // double division, constant division, and assignment try { exec("double x = 1.0; double y = 0.0; return x % y;"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java index 02a738de71e..c858e211faa 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/FloatOverflowEnabledTests.java @@ -24,49 +24,49 @@ import java.util.Map; /** Tests floating point overflow with numeric overflow enabled */ public class FloatOverflowEnabledTests extends ScriptTestCase { - + /** wire overflow to true for all tests */ @Override public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); } - public void testAssignmentAdditionOverflow() { + public void testAssignmentAdditionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x += 3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x += -3.4028234663852886E38f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x += 1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; 
x += -1.7976931348623157E308; return x;")); } - - public void testAssignmentSubtractionOverflow() { + + public void testAssignmentSubtractionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x -= -3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; x -= 3.4028234663852886E38f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x -= -1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; x -= 1.7976931348623157E308; return x;")); } - + public void testAssignmentMultiplicationOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= 3.4028234663852886E38f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x *= -3.4028234663852886E38f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= 1.7976931348623157E308; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x *= -1.7976931348623157E308; return x;")); } - + public void testAssignmentDivisionOverflow() { // float assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= 1.401298464324817E-45f; return x;")); assertEquals(Float.NEGATIVE_INFINITY, exec("float x = 3.4028234663852886E38f; x /= -1.401298464324817E-45f; return x;")); assertEquals(Float.POSITIVE_INFINITY, exec("float x = 1.0f; x /= 0.0f; return x;")); - + // double assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= 4.9E-324; return x;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = 1.7976931348623157E308; x /= -4.9E-324; return x;")); @@ -74,32 +74,32 @@ public class FloatOverflowEnabledTests extends ScriptTestCase { } public void testAddition() throws Exception { - 
assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;")); + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x + y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 1.7976931348623157E308; return x + y;")); } - + public void testAdditionConst() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;")); + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f + 3.4028234663852886E38f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 + 1.7976931348623157E308;")); } - + public void testSubtraction() throws Exception { - assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("float x = -3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x - y;")); assertEquals(Double.NEGATIVE_INFINITY, exec("double x = -1.7976931348623157E308; double y = 1.7976931348623157E308; return x - y;")); } - + public void testSubtractionConst() throws Exception { - assertEquals(Float.NEGATIVE_INFINITY, exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;")); + assertEquals(Float.NEGATIVE_INFINITY, exec("return -3.4028234663852886E38f - 3.4028234663852886E38f;")); assertEquals(Double.NEGATIVE_INFINITY, exec("return -1.7976931348623157E308 - 1.7976931348623157E308;")); } - + public void testMultiplication() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;")); + assertEquals(Float.POSITIVE_INFINITY, exec("float x = 3.4028234663852886E38f; float y = 3.4028234663852886E38f; return x * y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 
1.7976931348623157E308; double y = 1.7976931348623157E308; return x * y;")); } - + public void testMultiplicationConst() throws Exception { - assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;")); + assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f * 3.4028234663852886E38f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 * 1.7976931348623157E308;")); } @@ -109,32 +109,32 @@ public class FloatOverflowEnabledTests extends ScriptTestCase { assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.7976931348623157E308; double y = 4.9E-324; return x / y;")); assertEquals(Double.POSITIVE_INFINITY, exec("double x = 1.0; double y = 0.0; return x / y;")); } - + public void testDivisionConst() throws Exception { assertEquals(Float.POSITIVE_INFINITY, exec("return 3.4028234663852886E38f / 1.401298464324817E-45f;")); assertEquals(Float.POSITIVE_INFINITY, exec("return 1.0f / 0.0f;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.7976931348623157E308 / 4.9E-324;")); assertEquals(Double.POSITIVE_INFINITY, exec("return 1.0 / 0.0;")); } - + public void testDivisionNaN() throws Exception { // float division, constant division, and assignment assertTrue(Float.isNaN((Float) exec("float x = 0f; float y = 0f; return x / y;"))); assertTrue(Float.isNaN((Float) exec("return 0f / 0f;"))); assertTrue(Float.isNaN((Float) exec("float x = 0f; x /= 0f; return x;"))); - + // double division, constant division, and assignment assertTrue(Double.isNaN((Double) exec("double x = 0.0; double y = 0.0; return x / y;"))); assertTrue(Double.isNaN((Double) exec("return 0.0 / 0.0;"))); assertTrue(Double.isNaN((Double) exec("double x = 0.0; x /= 0.0; return x;"))); } - + public void testRemainderNaN() throws Exception { // float division, constant division, and assignment assertTrue(Float.isNaN((Float) exec("float x = 1f; float y = 0f; return x % y;"))); assertTrue(Float.isNaN((Float) exec("return 
1f % 0f;"))); assertTrue(Float.isNaN((Float) exec("float x = 1f; x %= 0f; return x;"))); - + // double division, constant division, and assignment assertTrue(Double.isNaN((Double) exec("double x = 1.0; double y = 0.0; return x % y;"))); assertTrue(Double.isNaN((Double) exec("return 1.0 % 0.0;"))); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java index dbffb11f0d0..86b4f46a765 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowDisabledTests.java @@ -24,11 +24,11 @@ import java.util.Map; /** Tests integer overflow with numeric overflow disabled */ public class IntegerOverflowDisabledTests extends ScriptTestCase { - + /** wire overflow to true for all tests */ @Override public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "false")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "false")); } public void testAssignmentAdditionOverflow() { @@ -42,7 +42,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x += -129; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // short try { exec("short x = 0; x += 32768; return x;"); @@ -53,7 +53,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x += -32769; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 0; x += 65536; return x;"); @@ -64,7 +64,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("char x = 0; x += -65536; return x;"); fail("did not get expected exception"); } catch 
(ArithmeticException expected) {} - + // int try { exec("int x = 1; x += 2147483647; return x;"); @@ -75,7 +75,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = -2; x += -2147483647; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 1; x += 9223372036854775807L; return x;"); @@ -87,7 +87,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentSubtractionOverflow() { // byte try { @@ -99,7 +99,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x -= 129; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // short try { exec("short x = 0; x -= -32768; return x;"); @@ -110,7 +110,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 0; x -= 32769; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 0; x -= -65536; return x;"); @@ -121,7 +121,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("char x = 0; x -= 65536; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 1; x -= -2147483647; return x;"); @@ -132,7 +132,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = -2; x -= 2147483647; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 1; x -= -9223372036854775807L; return x;"); @@ -144,7 +144,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentMultiplicationOverflow() { // byte try { @@ -156,7 
+156,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("byte x = 2; x *= -128; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 2; x *= 65536; return x;"); @@ -167,7 +167,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("char x = 2; x *= -65536; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 2; x *= 2147483647; return x;"); @@ -178,7 +178,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = 2; x *= -2147483647; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 2; x *= 9223372036854775807L; return x;"); @@ -190,7 +190,7 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAssignmentDivisionOverflow() { // byte try { @@ -203,108 +203,108 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("short x = (short) -32768; x /= -1; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // cannot happen for char: unsigned - + // int try { exec("int x = -2147483647 - 1; x /= -1; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testIncrementOverFlow() throws Exception { // byte try { exec("byte x = 127; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("byte x = 127; x++; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("byte x = (byte) -128; 
--x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("byte x = (byte) -128; x--; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // short try { exec("short x = 32767; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("short x = 32767; x++; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("short x = (short) -32768; --x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("short x = (short) -32768; x--; return x;"); } catch (ArithmeticException expected) {} - + // char try { exec("char x = 65535; ++x; return x;"); } catch (ArithmeticException expected) {} - + try { exec("char x = 65535; x++; return x;"); } catch (ArithmeticException expected) {} - + try { exec("char x = (char) 0; --x; return x;"); } catch (ArithmeticException expected) {} - + try { exec("char x = (char) 0; x--; return x;"); } catch (ArithmeticException expected) {} - + // int try { exec("int x = 2147483647; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("int x = 2147483647; x++; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("int x = (int) -2147483648L; --x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("int x = (int) -2147483648L; x--; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + // long try { exec("long x = 9223372036854775807L; ++x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = 9223372036854775807L; x++; return x;"); fail("did not get expected exception"); @@ -320,68 +320,68 @@ public 
class IntegerOverflowDisabledTests extends ScriptTestCase { fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testAddition() throws Exception { try { exec("int x = 2147483647; int y = 2147483647; return x + y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testAdditionConst() throws Exception { try { exec("return 2147483647 + 2147483647;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("return 9223372036854775807L + 9223372036854775807L;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - - + + public void testSubtraction() throws Exception { try { exec("int x = -10; int y = 2147483647; return x - y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -10L; long y = 9223372036854775807L; return x - y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testSubtractionConst() throws Exception { try { exec("return -10 - 2147483647;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("return -10L - 9223372036854775807L;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplication() throws Exception { try { exec("int x = 2147483647; int y = 2147483647; return x * y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testMultiplicationConst() throws Exception { try { exec("return 2147483647 * 2147483647;"); @@ 
-399,13 +399,13 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { exec("int x = -2147483647 - 1; int y = -1; return x / y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -9223372036854775808L; long y = -1L; return x / y;"); fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testDivisionConst() throws Exception { try { exec("return (-2147483648) / -1;"); @@ -417,25 +417,25 @@ public class IntegerOverflowDisabledTests extends ScriptTestCase { fail("should have hit exception"); } catch (ArithmeticException expected) {} } - + public void testNegationOverflow() throws Exception { try { exec("int x = -2147483648; x = -x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -9223372036854775808L; x = -x; return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} } - + public void testNegationOverflowConst() throws Exception { try { exec("int x = -(-2147483648); return x;"); fail("did not get expected exception"); } catch (ArithmeticException expected) {} - + try { exec("long x = -(-9223372036854775808L); return x;"); fail("did not get expected exception"); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java index cdab0e89fe6..ab5f82fd6d2 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/IntegerOverflowEnabledTests.java @@ -24,148 +24,148 @@ import java.util.Map; /** Tests integer overflow with numeric overflow enabled */ public class IntegerOverflowEnabledTests extends ScriptTestCase { - + /** wire overflow to true for all tests */ @Override public Object exec(String script, Map vars) { - 
return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, "true")); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, "true")); } public void testAssignmentAdditionOverflow() { // byte assertEquals((byte)(0 + 128), exec("byte x = 0; x += 128; return x;")); assertEquals((byte)(0 + -129), exec("byte x = 0; x += -129; return x;")); - + // short assertEquals((short)(0 + 32768), exec("short x = 0; x += 32768; return x;")); assertEquals((short)(0 + -32769), exec("short x = 0; x += -32769; return x;")); - + // char assertEquals((char)(0 + 65536), exec("char x = 0; x += 65536; return x;")); assertEquals((char)(0 + -65536), exec("char x = 0; x += -65536; return x;")); - + // int assertEquals(1 + 2147483647, exec("int x = 1; x += 2147483647; return x;")); assertEquals(-2 + -2147483647, exec("int x = -2; x += -2147483647; return x;")); - + // long assertEquals(1L + 9223372036854775807L, exec("long x = 1; x += 9223372036854775807L; return x;")); assertEquals(-2L + -9223372036854775807L, exec("long x = -2; x += -9223372036854775807L; return x;")); } - + public void testAssignmentSubtractionOverflow() { // byte assertEquals((byte)(0 - -128), exec("byte x = 0; x -= -128; return x;")); assertEquals((byte)(0 - 129), exec("byte x = 0; x -= 129; return x;")); - + // short assertEquals((short)(0 - -32768), exec("short x = 0; x -= -32768; return x;")); assertEquals((short)(0 - 32769), exec("short x = 0; x -= 32769; return x;")); - + // char assertEquals((char)(0 - -65536), exec("char x = 0; x -= -65536; return x;")); assertEquals((char)(0 - 65536), exec("char x = 0; x -= 65536; return x;")); - + // int assertEquals(1 - -2147483647, exec("int x = 1; x -= -2147483647; return x;")); assertEquals(-2 - 2147483647, exec("int x = -2; x -= 2147483647; return x;")); - + // long assertEquals(1L - -9223372036854775807L, exec("long x = 1; x -= -9223372036854775807L; return x;")); assertEquals(-2L - 9223372036854775807L, 
exec("long x = -2; x -= 9223372036854775807L; return x;")); } - + public void testAssignmentMultiplicationOverflow() { // byte assertEquals((byte) (2 * 128), exec("byte x = 2; x *= 128; return x;")); assertEquals((byte) (2 * -128), exec("byte x = 2; x *= -128; return x;")); - + // char assertEquals((char) (2 * 65536), exec("char x = 2; x *= 65536; return x;")); assertEquals((char) (2 * -65536), exec("char x = 2; x *= -65536; return x;")); - + // int assertEquals(2 * 2147483647, exec("int x = 2; x *= 2147483647; return x;")); assertEquals(2 * -2147483647, exec("int x = 2; x *= -2147483647; return x;")); - + // long assertEquals(2L * 9223372036854775807L, exec("long x = 2; x *= 9223372036854775807L; return x;")); assertEquals(2L * -9223372036854775807L, exec("long x = 2; x *= -9223372036854775807L; return x;")); } - + public void testAssignmentDivisionOverflow() { // byte assertEquals((byte) (-128 / -1), exec("byte x = (byte) -128; x /= -1; return x;")); // short assertEquals((short) (-32768 / -1), exec("short x = (short) -32768; x /= -1; return x;")); - + // cannot happen for char: unsigned - + // int assertEquals((-2147483647 - 1) / -1, exec("int x = -2147483647 - 1; x /= -1; return x;")); - + // long assertEquals((-9223372036854775807L - 1L) / -1L, exec("long x = -9223372036854775807L - 1L; x /=-1L; return x;")); } - + public void testIncrementOverFlow() throws Exception { // byte assertEquals((byte) 128, exec("byte x = 127; ++x; return x;")); assertEquals((byte) 128, exec("byte x = 127; x++; return x;")); assertEquals((byte) -129, exec("byte x = (byte) -128; --x; return x;")); assertEquals((byte) -129, exec("byte x = (byte) -128; x--; return x;")); - + // short assertEquals((short) 32768, exec("short x = 32767; ++x; return x;")); assertEquals((short) 32768, exec("short x = 32767; x++; return x;")); assertEquals((short) -32769, exec("short x = (short) -32768; --x; return x;")); assertEquals((short) -32769, exec("short x = (short) -32768; x--; return x;")); - + // 
char assertEquals((char) 65536, exec("char x = 65535; ++x; return x;")); assertEquals((char) 65536, exec("char x = 65535; x++; return x;")); assertEquals((char) -1, exec("char x = (char) 0; --x; return x;")); assertEquals((char) -1, exec("char x = (char) 0; x--; return x;")); - + // int - assertEquals(2147483647 + 1, exec("int x = 2147483647; ++x; return x;")); + assertEquals(2147483647 + 1, exec("int x = 2147483647; ++x; return x;")); assertEquals(2147483647 + 1, exec("int x = 2147483647; x++; return x;")); - assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; --x; return x;")); + assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; --x; return x;")); assertEquals(-2147483648 - 1, exec("int x = (int) -2147483648L; x--; return x;")); - + // long - assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; ++x; return x;")); + assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; ++x; return x;")); assertEquals(9223372036854775807L + 1L, exec("long x = 9223372036854775807L; x++; return x;")); assertEquals(-9223372036854775807L - 1L - 1L, exec("long x = -9223372036854775807L - 1L; --x; return x;")); assertEquals(-9223372036854775807L - 1L - 1L, exec("long x = -9223372036854775807L - 1L; x--; return x;")); } - + public void testAddition() throws Exception { - assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;")); + assertEquals(2147483647 + 2147483647, exec("int x = 2147483647; int y = 2147483647; return x + y;")); assertEquals(9223372036854775807L + 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x + y;")); } - + public void testAdditionConst() throws Exception { - assertEquals(2147483647 + 2147483647, exec("return 2147483647 + 2147483647;")); + assertEquals(2147483647 + 2147483647, exec("return 2147483647 + 2147483647;")); assertEquals(9223372036854775807L + 9223372036854775807L, exec("return 
9223372036854775807L + 9223372036854775807L;")); } - + public void testSubtraction() throws Exception { - assertEquals(-10 - 2147483647, exec("int x = -10; int y = 2147483647; return x - y;")); + assertEquals(-10 - 2147483647, exec("int x = -10; int y = 2147483647; return x - y;")); assertEquals(-10L - 9223372036854775807L, exec("long x = -10L; long y = 9223372036854775807L; return x - y;")); } - + public void testSubtractionConst() throws Exception { - assertEquals(-10 - 2147483647, exec("return -10 - 2147483647;")); + assertEquals(-10 - 2147483647, exec("return -10 - 2147483647;")); assertEquals(-10L - 9223372036854775807L, exec("return -10L - 9223372036854775807L;")); } - + public void testMultiplication() throws Exception { - assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;")); + assertEquals(2147483647 * 2147483647, exec("int x = 2147483647; int y = 2147483647; return x * y;")); assertEquals(9223372036854775807L * 9223372036854775807L, exec("long x = 9223372036854775807L; long y = 9223372036854775807L; return x * y;")); } - + public void testMultiplicationConst() throws Exception { assertEquals(2147483647 * 2147483647, exec("return 2147483647 * 2147483647;")); assertEquals(9223372036854775807L * 9223372036854775807L, exec("return 9223372036854775807L * 9223372036854775807L;")); @@ -175,17 +175,17 @@ public class IntegerOverflowEnabledTests extends ScriptTestCase { assertEquals((-2147483647 - 1) / -1, exec("int x = -2147483648; int y = -1; return x / y;")); assertEquals((-9223372036854775807L - 1L) / -1L, exec("long x = -9223372036854775808L; long y = -1L; return x / y;")); } - + public void testDivisionConst() throws Exception { assertEquals((-2147483647 - 1) / -1, exec("return (-2147483648) / -1;")); assertEquals((-9223372036854775807L - 1L) / -1L, exec("return (-9223372036854775808L) / -1L;")); } - + public void testNegationOverflow() throws Exception { assertEquals(-(-2147483647 - 1), exec("int x = 
-2147483648; x = -x; return x;")); assertEquals(-(-9223372036854775807L - 1L), exec("long x = -9223372036854775808L; x = -x; return x;")); } - + public void testNegationOverflowConst() throws Exception { assertEquals(-(-2147483647 - 1), exec("int x = -(-2147483648); return x;")); assertEquals(-(-9223372036854775807L - 1L), exec("long x = -(-9223372036854775808L); return x;")); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java index c2c19ccb03a..5e0b0035ceb 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/PlanARestIT.java @@ -21,22 +21,15 @@ package org.elasticsearch.plan.a; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; /** Runs yaml rest tests */ public class PlanARestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(PlanAPlugin.class); - } - public PlanARestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java index e5084392f99..8f2991c3d0c 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptEngineTests.java @@ -35,6 +35,7 @@ public class ScriptEngineTests extends ScriptTestCase { assertEquals(3, ((Number)value).intValue()); } + 
@SuppressWarnings("unchecked") // We know its Map because we put them there in the test public void testMapAccess() { Map vars = new HashMap<>(); Map obj2 = new HashMap<>(); @@ -54,6 +55,7 @@ public class ScriptEngineTests extends ScriptTestCase { assertEquals("2", value); } + @SuppressWarnings("unchecked") // We know its Map because we put them there ourselves public void testAccessListInScript() { Map vars = new HashMap<>(); Map obj2 = new HashMap<>(); diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java index 5b4948036f3..8ff87bd2a0d 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/ScriptTestCase.java @@ -48,7 +48,7 @@ public abstract class ScriptTestCase extends ESTestCase { /** Compiles and returns the result of {@code script} with access to {@code vars} */ public Object exec(String script, Map vars) { - return exec(script, vars, Collections.singletonMap(PlanAScriptEngineService.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); + return exec(script, vars, Collections.singletonMap(CompilerSettings.NUMERIC_OVERFLOW, Boolean.toString(random().nextBoolean()))); } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ diff --git a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java index 277778e7e76..e4dbce83122 100644 --- a/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java +++ b/plugins/lang-plan-a/src/test/java/org/elasticsearch/plan/a/WhenThingsGoWrongTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.plan.a; +import java.text.ParseException; import java.util.Collections; public class WhenThingsGoWrongTests 
extends ScriptTestCase { @@ -40,7 +41,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { fail("should have hit cce"); } catch (ClassCastException expected) {} } - + public void testBogusParameter() { try { exec("return 5;", null, Collections.singletonMap("bogusParameterKey", "bogusParameterValue")); @@ -49,4 +50,83 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { assertTrue(expected.getMessage().contains("Unrecognized compile-time parameter")); } } + + public void testInfiniteLoops() { + try { + exec("boolean x = true; while (x) {}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("while (true) {int y = 5}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("while (true) { boolean x = true; while (x) {} }"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("while (true) { boolean x = false; while (x) {} }"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("boolean x = true; for (;x;) {}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("for (;;) {int x = 5}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + 
assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("def x = true; do {int y = 5;} while (x)"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + + try { + exec("try { int x } catch (PlanAError error) {}"); + fail("should have hit ParseException"); + } catch (RuntimeException expected) { + assertTrue(expected.getMessage().contains( + "unexpected token ['PlanAError'] was expecting one of [TYPE].")); + } + + } + + public void testLoopLimits() { + exec("for (int x = 0; x < 9999; ++x) {}"); + + try { + exec("for (int x = 0; x < 10000; ++x) {}"); + fail("should have hit PlanAError"); + } catch (PlanAError expected) { + assertTrue(expected.getMessage().contains( + "The maximum number of statements that can be executed in a loop has been reached.")); + } + } } diff --git a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml index 6259780bfb4..04a5a7a2051 100644 --- a/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml +++ b/plugins/lang-plan-a/src/test/resources/rest-api-spec/test/plan_a/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: lang-plan-a } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java index d7d0ca662c8..ee0a707644f 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/LangPythonScriptRestIT.java @@ -21,22 
+21,14 @@ package org.elasticsearch.script.python; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.python.PythonPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class LangPythonScriptRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(PythonPlugin.class); - } - public LangPythonScriptRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-attachments/build.gradle b/plugins/mapper-attachments/build.gradle index 70741c2094d..340832af6f8 100644 --- a/plugins/mapper-attachments/build.gradle +++ b/plugins/mapper-attachments/build.gradle @@ -61,8 +61,6 @@ dependencies { compile 'org.apache.commons:commons-compress:1.10' } -compileJava.options.compilerArgs << '-Xlint:-cast,-rawtypes' - forbiddenPatterns { exclude '**/*.docx' exclude '**/*.pdf' diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 7c54e6f17ce..082adb958ec 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -27,6 +27,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -70,6 +71,9 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; public class AttachmentMapper extends FieldMapper { private static ESLogger logger = ESLoggerFactory.getLogger("mapper.attachment"); + public static final Setting INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING = Setting.boolSetting("index.mapping.attachment.ignore_errors", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING = Setting.boolSetting("index.mapping.attachment.detect_language", false, false, Setting.Scope.INDEX); + public static final Setting INDEX_ATTACHMENT_INDEXED_CHARS_SETTING = Setting.intSetting("index.mapping.attachment.indexed_chars", 100000, false, Setting.Scope.INDEX); public static final String CONTENT_TYPE = "attachment"; @@ -124,23 +128,23 @@ public class AttachmentMapper extends FieldMapper { private Boolean langDetect = null; - private Mapper.Builder contentBuilder; + private Mapper.Builder contentBuilder; - private Mapper.Builder titleBuilder = stringField(FieldNames.TITLE); + private Mapper.Builder titleBuilder = stringField(FieldNames.TITLE); - private Mapper.Builder nameBuilder = stringField(FieldNames.NAME); + private Mapper.Builder nameBuilder = stringField(FieldNames.NAME); - private Mapper.Builder authorBuilder = stringField(FieldNames.AUTHOR); + private Mapper.Builder authorBuilder = stringField(FieldNames.AUTHOR); - private Mapper.Builder keywordsBuilder = stringField(FieldNames.KEYWORDS); + private Mapper.Builder keywordsBuilder = stringField(FieldNames.KEYWORDS); - private Mapper.Builder dateBuilder = dateField(FieldNames.DATE); + private Mapper.Builder dateBuilder = dateField(FieldNames.DATE); - private Mapper.Builder contentTypeBuilder = stringField(FieldNames.CONTENT_TYPE); + private Mapper.Builder contentTypeBuilder = stringField(FieldNames.CONTENT_TYPE); - private 
Mapper.Builder contentLengthBuilder = integerField(FieldNames.CONTENT_LENGTH); + private Mapper.Builder contentLengthBuilder = integerField(FieldNames.CONTENT_LENGTH); - private Mapper.Builder languageBuilder = stringField(FieldNames.LANGUAGE); + private Mapper.Builder languageBuilder = stringField(FieldNames.LANGUAGE); public Builder(String name) { super(name, new AttachmentFieldType(), new AttachmentFieldType()); @@ -148,47 +152,47 @@ public class AttachmentMapper extends FieldMapper { this.contentBuilder = stringField(FieldNames.CONTENT); } - public Builder content(Mapper.Builder content) { + public Builder content(Mapper.Builder content) { this.contentBuilder = content; return this; } - public Builder date(Mapper.Builder date) { + public Builder date(Mapper.Builder date) { this.dateBuilder = date; return this; } - public Builder author(Mapper.Builder author) { + public Builder author(Mapper.Builder author) { this.authorBuilder = author; return this; } - public Builder title(Mapper.Builder title) { + public Builder title(Mapper.Builder title) { this.titleBuilder = title; return this; } - public Builder name(Mapper.Builder name) { + public Builder name(Mapper.Builder name) { this.nameBuilder = name; return this; } - public Builder keywords(Mapper.Builder keywords) { + public Builder keywords(Mapper.Builder keywords) { this.keywordsBuilder = keywords; return this; } - public Builder contentType(Mapper.Builder contentType) { + public Builder contentType(Mapper.Builder contentType) { this.contentTypeBuilder = contentType; return this; } - public Builder contentLength(Mapper.Builder contentType) { + public Builder contentLength(Mapper.Builder contentType) { this.contentLengthBuilder = contentType; return this; } - public Builder language(Mapper.Builder language) { + public Builder language(Mapper.Builder language) { this.languageBuilder = language; return this; } @@ -202,7 +206,7 @@ public class AttachmentMapper extends FieldMapper { if (contentBuilder instanceof 
FieldMapper.Builder == false) { throw new IllegalStateException("content field for attachment must be a field mapper"); } - ((FieldMapper.Builder)contentBuilder).indexName(name); + ((FieldMapper.Builder)contentBuilder).indexName(name); contentBuilder.name = name + "." + FieldNames.CONTENT; contentMapper = (FieldMapper) contentBuilder.build(context); context.path().add(name); @@ -222,21 +226,21 @@ public class AttachmentMapper extends FieldMapper { context.path().remove(); if (defaultIndexedChars == null && context.indexSettings() != null) { - defaultIndexedChars = context.indexSettings().getAsInt("index.mapping.attachment.indexed_chars", 100000); + defaultIndexedChars = INDEX_ATTACHMENT_INDEXED_CHARS_SETTING.get(context.indexSettings()); } if (defaultIndexedChars == null) { defaultIndexedChars = 100000; } if (ignoreErrors == null && context.indexSettings() != null) { - ignoreErrors = context.indexSettings().getAsBoolean("index.mapping.attachment.ignore_errors", Boolean.TRUE); + ignoreErrors = INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING.get(context.indexSettings()); } if (ignoreErrors == null) { ignoreErrors = Boolean.TRUE; } if (langDetect == null && context.indexSettings() != null) { - langDetect = context.indexSettings().getAsBoolean("index.mapping.attachment.detect_language", Boolean.FALSE); + langDetect = INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING.get(context.indexSettings()); } if (langDetect == null) { langDetect = Boolean.FALSE; @@ -299,9 +303,9 @@ public class AttachmentMapper extends FieldMapper { return mapperBuilder; } - @SuppressWarnings({"unchecked"}) @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + @SuppressWarnings("unchecked") // Safe because we know how our maps are shaped + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { AttachmentMapper.Builder builder = new AttachmentMapper.Builder(name); for (Iterator> iterator = 
node.entrySet().iterator(); iterator.hasNext();) { @@ -316,7 +320,7 @@ public class AttachmentMapper extends FieldMapper { Map propNode = (Map) entry1.getValue(); Mapper.Builder mapperBuilder = findMapperBuilder(propNode, propName, parserContext); - if (parseMultiField((FieldMapper.Builder) mapperBuilder, fieldName, parserContext, propName, propNode)) { + if (parseMultiField((FieldMapper.Builder) mapperBuilder, fieldName, parserContext, propName, propNode)) { fieldsIterator.remove(); } else if (propName.equals(name) && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { builder.content(mapperBuilder); diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java index 9b640f98d16..e224607fff1 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/MapperAttachmentsPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.mapper.attachments; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.plugins.Plugin; @@ -34,6 +35,12 @@ public class MapperAttachmentsPlugin extends Plugin { return "Adds the attachment type allowing to parse difference attachment formats"; } + public void onModule(SettingsModule settingsModule) { + settingsModule.registerSetting(AttachmentMapper.INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING); + settingsModule.registerSetting(AttachmentMapper.INDEX_ATTACHMENT_IGNORE_ERRORS_SETTING); + settingsModule.registerSetting(AttachmentMapper.INDEX_ATTACHMENT_INDEXED_CHARS_SETTING); + } + public void onModule(IndicesModule indicesModule) { indicesModule.registerMapper("attachment", new AttachmentMapper.TypeParser()); } diff --git 
a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java index 9b7d8afe381..81a82825f8b 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/AttachmentUnitTestCase.java @@ -22,6 +22,7 @@ package org.elasticsearch.mapper.attachments; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -39,7 +40,7 @@ public class AttachmentUnitTestCase extends ESTestCase { @Before public void createSettings() throws Exception { testSettings = Settings.builder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id) .build(); } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java index 1eecda65a05..1eccb1a1445 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/MapperAttachmentsRestIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.mapper.attachments; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.rest.ESRestTestCase; import 
org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; @@ -30,14 +29,6 @@ import java.io.IOException; public class MapperAttachmentsRestIT extends ESRestTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("plugin.types", MapperAttachmentsPlugin.class.getName()) - .build(); - } - public MapperAttachmentsRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml index 819478d7d56..9654535f2ac 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/00_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: mapper-attachments } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java index 97c5ad994a4..bbe342c716c 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3RestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.mapper.murmur3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import 
org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MapperMurmur3RestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MapperMurmur3Plugin.class); - } - public MapperMurmur3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 1b54b8293a7..1da604c4184 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -33,14 +33,22 @@ import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + MapperRegistry mapperRegistry; IndexService indexService; DocumentMapperParser parser; diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java index b3ad01bae49..fe12cb042d4 100644 --- 
a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; import org.elasticsearch.plugins.Plugin; @@ -62,7 +63,7 @@ public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { Path dataPath = createTempDir(); Settings settings = Settings.builder() - .put("path.data", dataPath) + .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) .build(); final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java index 9899776f7dd..84df085f221 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.mapper.size; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.mapper.MapperSizePlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import 
org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class MapperSizeRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(MapperSizePlugin.class); - } - public MapperSizeRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java index 4529111c16e..a2af6df4e75 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugins.Plugin; @@ -63,7 +64,7 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { Path dataPath = createTempDir(); Settings settings = Settings.builder() - .put("path.data", dataPath) + .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) .build(); final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index 956dd29402b..a44dddda3ed 100644 --- 
a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -34,9 +34,12 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; import org.junit.Before; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -52,6 +55,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase { MapperService mapperService; DocumentMapperParser parser; + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); // uses index.version.created + } + @Before public void before() { indexService = createIndex("test"); @@ -139,12 +147,12 @@ public class SizeMappingTests extends ESSingleNodeTestCase { String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), true, false); + DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_size").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), false, false); + DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), 
MapperService.MergeReason.MAPPING_UPDATE, false); assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 86f58207e7d..3cecd810a95 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -123,9 +123,10 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent> nodePlugins() { - return pluginList(AzureRepositoryPlugin.class, TestPlugin.class); + return pluginList(AzureRepositoryPlugin.class, TestPlugin.class, MockFSIndexStore.TestPlugin.class); } @Override @@ -93,8 +94,8 @@ public abstract class AbstractAzureRepositoryServiceTestCase extends AbstractAzu // During restore we frequently restore index to exactly the same state it was before, that might cause the same // checksum file to be written twice during restore operation return Settings.builder().put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING.getKey(), false) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java index 0f144a13075..93683d9d014 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java +++ 
b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTest.java @@ -61,6 +61,16 @@ public class AzureStorageServiceTest extends ESTestCase { assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); } + public void testGetDefaultClientWithNoSecondary() { + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(Settings.builder() + .put("cloud.azure.storage.azure1.account", "myaccount1") + .put("cloud.azure.storage.azure1.key", "mykey1") + .build()); + azureStorageService.doStart(); + CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY); + assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); + } + public void testGetSelectedClientPrimary() { AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); azureStorageService.doStart(); @@ -82,6 +92,13 @@ public class AzureStorageServiceTest extends ESTestCase { assertThat(client.getEndpoint(), is(URI.create("https://azure3"))); } + public void testGetDefaultClientWithPrimaryAndSecondaries() { + AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); + azureStorageService.doStart(); + CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY); + assertThat(client.getEndpoint(), is(URI.create("https://azure1"))); + } + public void testGetSelectedClientNonExisting() { AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings); azureStorageService.doStart(); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java index d0267d51b0e..ad58838bf5b 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java +++ 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositoryRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.repositories.azure; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.repository.azure.AzureRepositoryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class AzureRepositoryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(AzureRepositoryPlugin.class); - } - public AzureRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index cec7361de0a..1818a5e6252 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -35,6 +35,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.repositories.RepositoryMissingException; import org.elasticsearch.repositories.RepositoryVerificationException; import org.elasticsearch.repositories.azure.AzureRepository.Repository; @@ -77,7 +78,7 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa protected Settings 
nodeSettings(int nodeOrdinal) { return Settings.builder().put(super.nodeSettings(nodeOrdinal)) // In snapshot tests, we explicitly disable cloud discovery - .put("discovery.type", "local") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") .build(); } @@ -86,8 +87,8 @@ public class AzureSnapshotRestoreTests extends AbstractAzureWithThirdPartyTestCa // During restore we frequently restore index to exactly the same state it was before, that might cause the same // checksum file to be written twice during restore operation return Settings.builder().put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING.getKey(), false) .build(); } diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml index a77304ba5fc..fb929f1e822 100644 --- a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-azure } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java index db423cdd44f..dea6e8b749f 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryRestIT.java @@ -19,24 +19,15 @@ package 
org.elasticsearch.repositories.hdfs; import java.io.IOException; -import java.util.Collection; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.hdfs.HdfsPlugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; public class HdfsRepositoryRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(HdfsPlugin.class); - } - public HdfsRepositoryRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml index 7c569408a61..6fbbfc82e87 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yaml @@ -13,7 +13,6 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-hdfs } - - match: { nodes.$master.plugins.0.jvm: true } --- # # Check that we can't use file:// repositories or anything like that diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index e823e8e6681..bc3706263f7 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ThirdParty; @@ -40,7 +41,7 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .extendArray("plugin.types", S3RepositoryPlugin.class.getName(), TestAwsS3Service.TestPlugin.class.getName()) .put("cloud.aws.test.random", randomInt()) .put("cloud.aws.test.write_failures", 0.1) diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 55f88fbfeea..151daaab2a8 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -59,8 +59,8 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase // During restore we frequently restore index to exactly the same state it was before, that might cause the same // checksum file to be written twice during restore operation return Settings.builder().put(super.indexSettings()) - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE, false) - .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE, false) + .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) + .put(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING.getKey(), false) .put("cloud.enabled", true) .put("plugin.types", S3RepositoryPlugin.class.getName()) 
.put("repositories.s3.base_path", basePath) diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java index 8521780b1d8..d8e436b50bb 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3RestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.repositories.s3; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class RepositoryS3RestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(S3RepositoryPlugin.class); - } - public RepositoryS3RestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml index 811ff888baa..5fcc81209e2 100644 --- a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: repository-s3 } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/plugins/site-example/src/site/_site/index.html b/plugins/site-example/src/site/_site/index.html deleted file mode 100644 index 
bc6343f6653..00000000000 --- a/plugins/site-example/src/site/_site/index.html +++ /dev/null @@ -1,6 +0,0 @@ - - - Page title - - Page body - diff --git a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java b/plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java deleted file mode 100644 index c92a0ba719e..00000000000 --- a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteContentsIT.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.example; - -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; -import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ExternalTestCluster; -import org.elasticsearch.test.TestCluster; -import org.elasticsearch.test.rest.client.RestResponse; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; - -import java.net.InetSocketAddress; -import java.util.concurrent.TimeUnit; - -/** - * verifies content is actually served for the site plugin - */ -public class SiteContentsIT extends ESIntegTestCase { - - // TODO: probably a better way to test, but we don't want to really - // define a fake rest spec or anything? - public void test() throws Exception { - TestCluster cluster = cluster(); - assumeTrue("this test will not work from an IDE unless you pass tests.cluster pointing to a running instance", cluster instanceof ExternalTestCluster); - ExternalTestCluster externalCluster = (ExternalTestCluster) cluster; - try (CloseableHttpClient httpClient = HttpClients.createMinimal(new PoolingHttpClientConnectionManager(15, TimeUnit.SECONDS))) { - for (InetSocketAddress address : externalCluster.httpAddresses()) { - RestResponse restResponse = new RestResponse( - new HttpRequestBuilder(httpClient) - .host(NetworkAddress.formatAddress(address.getAddress())).port(address.getPort()) - .path("/_plugin/site-example/") - .method("GET").execute()); - assertEquals(200, restResponse.getStatusCode()); - String body = restResponse.getBodyAsString(); - assertTrue("unexpected body contents: " + body, body.contains("Page body")); - } - } - } -} diff --git a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java b/plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java deleted file mode 100644 index 
e3df9ce92d9..00000000000 --- a/plugins/site-example/src/test/java/org/elasticsearch/example/SiteRestIT.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.example; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.test.rest.RestTestCandidate; -import org.elasticsearch.test.rest.parser.RestTestParseException; - -import java.io.IOException; - -public class SiteRestIT extends ESRestTestCase { - - public SiteRestIT(@Name("yaml") RestTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws IOException, RestTestParseException { - return ESRestTestCase.createParameters(0, 1); - } -} - diff --git a/plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml b/plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml deleted file mode 100644 index a66ce5c9133..00000000000 --- a/plugins/site-example/src/test/resources/rest-api-spec/test/example/10_basic.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# 
Integration tests for Example site plugin -# -"Example site loaded": - - do: - cluster.state: {} - - # Get master node id - - set: { master_node: master } - - - do: - nodes.info: {} - - - match: { nodes.$master.plugins.0.name: site-example } - - match: { nodes.$master.plugins.0.jvm: false } - - match: { nodes.$master.plugins.0.site: true } diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java index 9d9bdc1d389..03a19d752c1 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smbmmapfs/SmbMmapFsDirectoryService.java @@ -40,6 +40,6 @@ public class SmbMmapFsDirectoryService extends FsDirectoryService { @Override protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { logger.debug("wrapping MMapDirectory for SMB"); - return new SmbDirectoryWrapper(new MMapDirectory(location, buildLockFactory(indexSettings))); + return new SmbDirectoryWrapper(new MMapDirectory(location, indexSettings.getValue(INDEX_LOCK_FACTOR_SETTING))); } } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java index 649af173763..af1b0372995 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/SMBStoreRestIT.java @@ -21,22 +21,14 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.plugin.store.smb.SMBStorePlugin; -import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.RestTestCandidate; import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -import java.util.Collection; public class SMBStoreRestIT extends ESRestTestCase { - @Override - protected Collection> nodePlugins() { - return pluginList(SMBStorePlugin.class); - } - public SMBStoreRestIT(@Name("yaml") RestTestCandidate testCandidate) { super(testCandidate); } diff --git a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml index 155a39b1471..a210fd4e597 100644 --- a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml +++ b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yaml @@ -11,4 +11,3 @@ nodes.info: {} - match: { nodes.$master.plugins.0.name: store-smb } - - match: { nodes.$master.plugins.0.jvm: true } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java index 695d2a42321..c213f18f333 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java @@ -36,14 +36,14 @@ import java.util.Set; @SuppressForbidden(reason = "modifies system properties and attempts to create symbolic links intentionally") public class EvilSecurityTests extends ESTestCase { - + /** test generated permissions */ public void testGeneratedPermissions() throws Exception { Path path = createTempDir(); // make a fake ES home and ensure we only grant permissions to that. 
Path esHome = path.resolve("esHome"); Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put("path.home", esHome.toString()); + settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.toString()); Settings settings = settingsBuilder.build(); Path fakeTmpDir = createTempDir(); @@ -56,7 +56,7 @@ public class EvilSecurityTests extends ESTestCase { } finally { System.setProperty("java.io.tmpdir", realTmpDir); } - + // the fake es home assertNoPermissions(esHome, permissions); // its parent @@ -74,14 +74,14 @@ public class EvilSecurityTests extends ESTestCase { Path esHome = path.resolve("esHome"); Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put("path.home", esHome.resolve("home").toString()); - settingsBuilder.put("path.conf", esHome.resolve("conf").toString()); - settingsBuilder.put("path.scripts", esHome.resolve("scripts").toString()); - settingsBuilder.put("path.plugins", esHome.resolve("plugins").toString()); - settingsBuilder.putArray("path.data", esHome.resolve("data1").toString(), esHome.resolve("data2").toString()); - settingsBuilder.put("path.shared_data", esHome.resolve("custom").toString()); - settingsBuilder.put("path.logs", esHome.resolve("logs").toString()); - settingsBuilder.put("pidfile", esHome.resolve("test.pid").toString()); + settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString()); + settingsBuilder.put(Environment.PATH_CONF_SETTING.getKey(), esHome.resolve("conf").toString()); + settingsBuilder.put(Environment.PATH_SCRIPTS_SETTING.getKey(), esHome.resolve("scripts").toString()); + settingsBuilder.put(Environment.PATH_PLUGINS_SETTING.getKey(), esHome.resolve("plugins").toString()); + settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(), esHome.resolve("data2").toString()); + settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString()); + 
settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString()); + settingsBuilder.put(Environment.PIDFILE_SETTING.getKey(), esHome.resolve("test.pid").toString()); Settings settings = settingsBuilder.build(); Path fakeTmpDir = createTempDir(); @@ -104,7 +104,7 @@ public class EvilSecurityTests extends ESTestCase { assertNoPermissions(esHome.getParent().resolve("other"), permissions); // double check we overwrote java.io.tmpdir correctly for the test assertNoPermissions(PathUtils.get(realTmpDir), permissions); - + // check that all directories got permissions: // bin file: ro @@ -135,10 +135,10 @@ public class EvilSecurityTests extends ESTestCase { // PID file: delete only (for the shutdown hook) assertExactPermissions(new FilePermission(environment.pidFile().toString(), "delete"), permissions); } - + public void testEnsureSymlink() throws IOException { Path p = createTempDir(); - + Path exists = p.resolve("exists"); Files.createDirectory(exists); @@ -154,7 +154,7 @@ public class EvilSecurityTests extends ESTestCase { Security.ensureDirectoryExists(linkExists); Files.createTempFile(linkExists, null, null); } - + public void testEnsureBrokenSymlink() throws IOException { Path p = createTempDir(); @@ -199,7 +199,7 @@ public class EvilSecurityTests extends ESTestCase { assertExactPermissions(new FilePermission(target.resolve("foo").toString(), "read"), permissions); } - /** + /** * checks exact file permissions, meaning those and only those for that path. 
*/ static void assertExactPermissions(FilePermission expected, PermissionCollection actual) { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java index 8633511756d..45f3df22cd7 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CheckFileCommandTests.java @@ -124,7 +124,7 @@ public class CheckFileCommandTests extends ESTestCase { try (FileSystem fs = Jimfs.newFileSystem(configuration)) { Path path = fs.getPath(randomAsciiOfLength(10)); Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); new CreateFileCommand(captureOutputTerminal, path).execute(settings, new Environment(settings)); assertThat(Files.exists(path), is(true)); @@ -141,7 +141,7 @@ public class CheckFileCommandTests extends ESTestCase { Files.write(path, "anything".getBytes(StandardCharsets.UTF_8)); Settings settings = Settings.builder() - .put("path.home", createTempDir().toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); new DeleteFileCommand(captureOutputTerminal, path).execute(settings, new Environment(settings)); assertThat(Files.exists(path), is(false)); @@ -173,7 +173,7 @@ public class CheckFileCommandTests extends ESTestCase { this.fs = fs; this.paths = new Path[] { writePath(fs, "p1", "anything"), writePath(fs, "p2", "anything"), writePath(fs, "p3", "anything") }; Settings settings = Settings.settingsBuilder() - .put("path.home", baseDir.toString()) + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir.toString()) .build(); return super.execute(Settings.EMPTY, new Environment(settings)); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java 
b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java index 2613ab9951a..92ab945dfca 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/cli/CliToolTests.java @@ -28,7 +28,9 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import java.io.IOException; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -115,9 +117,9 @@ public class CliToolTests extends CliToolTestCase { public void testMultiCommand() { Terminal terminal = new MockTerminal(); int count = randomIntBetween(2, 7); - final AtomicReference[] executed = new AtomicReference[count]; - for (int i = 0; i < executed.length; i++) { - executed[i] = new AtomicReference<>(false); + List> executed = new ArrayList<>(count); + for (int i = 0; i < count; i++) { + executed.add(new AtomicReference<>(false)); } NamedCommand[] cmds = new NamedCommand[count]; for (int i = 0; i < count; i++) { @@ -125,7 +127,7 @@ public class CliToolTests extends CliToolTestCase { cmds[i] = new NamedCommand("cmd" + index, terminal) { @Override public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - executed[index].set(true); + executed.get(index).set(true); return OK; } }; @@ -134,17 +136,17 @@ public class CliToolTests extends CliToolTestCase { int cmdIndex = randomIntBetween(0, count-1); CliTool.ExitStatus status = tool.execute("cmd" + cmdIndex); assertThat(status, is(OK)); - for (int i = 0; i < executed.length; i++) { - assertThat(executed[i].get(), is(i == cmdIndex)); + for (int i = 0; i < count; i++) { + assertThat(executed.get(i).get(), is(i == cmdIndex)); } } public void testMultiCommandUnknownCommand() { Terminal terminal = new MockTerminal(); int count = 
randomIntBetween(2, 7); - final AtomicReference[] executed = new AtomicReference[count]; - for (int i = 0; i < executed.length; i++) { - executed[i] = new AtomicReference<>(false); + List> executed = new ArrayList<>(count); + for (int i = 0; i < count; i++) { + executed.add(new AtomicReference<>(false)); } NamedCommand[] cmds = new NamedCommand[count]; for (int i = 0; i < count; i++) { @@ -152,7 +154,7 @@ public class CliToolTests extends CliToolTestCase { cmds[i] = new NamedCommand("cmd" + index, terminal) { @Override public CliTool.ExitStatus execute(Settings settings, Environment env) throws Exception { - executed[index].set(true); + executed.get(index).set(true); return OK; } }; @@ -160,8 +162,8 @@ public class CliToolTests extends CliToolTestCase { MultiCmdTool tool = new MultiCmdTool("tool", terminal, cmds); CliTool.ExitStatus status = tool.execute("cmd" + count); // "cmd" + count doesn't exist assertThat(status, is(CliTool.ExitStatus.USAGE)); - for (int i = 0; i < executed.length; i++) { - assertThat(executed[i].get(), is(false)); + for (int i = 0; i < count; i++) { + assertThat(executed.get(i).get(), is(false)); } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java index 3789c273cf8..d2c8ccfd3c9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/node/internal/EvilInternalSettingsPreparerTests.java @@ -72,7 +72,7 @@ public class EvilInternalSettingsPreparerTests extends ESTestCase { @Before public void createBaseEnvSettings() { baseEnvSettings = settingsBuilder() - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java 
b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java index 0eebc9731ff..5e70cf71923 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerPermissionTests.java @@ -70,13 +70,13 @@ public class PluginManagerPermissionTests extends ESTestCase { @Before public void setup() { Path tempDir = createTempDir(); - Settings.Builder settingsBuilder = settingsBuilder().put("path.home", tempDir); + Settings.Builder settingsBuilder = settingsBuilder().put(Environment.PATH_HOME_SETTING.getKey(), tempDir); if (randomBoolean()) { - settingsBuilder.put("path.plugins", createTempDir()); + settingsBuilder.put(Environment.PATH_PLUGINS_SETTING.getKey(), createTempDir()); } if (randomBoolean()) { - settingsBuilder.put("path.conf", createTempDir()); + settingsBuilder.put(Environment.PATH_CONF_SETTING.getKey(), createTempDir()); } environment = new Environment(settingsBuilder.build()); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java index bc92f894019..24055d9f6dc 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerTests.java @@ -108,7 +108,7 @@ public class PluginManagerTests extends ESIntegTestCase { @Before public void setup() throws Exception { environment = buildInitialSettings(); - System.setProperty("es.default.path.home", environment.settings().get("path.home")); + System.setProperty("es.default.path.home", Environment.PATH_HOME_SETTING.get(environment.settings())); Path binDir = environment.binFile(); if (!Files.exists(binDir)) { Files.createDirectories(binDir); @@ -196,7 +196,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", 
Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); assertStatus("install", USAGE); } @@ -216,7 +215,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); Path binDir = environment.binFile(); @@ -260,7 +258,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); Path pluginConfigDir = environment.configFile().resolve(pluginName); @@ -296,7 +293,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "2.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); assertStatusOk(String.format(Locale.ROOT, "install %s --verbose", pluginUrl)); @@ -361,7 +357,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); Path binDir = environment.binFile(); @@ -392,16 +387,13 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); System.err.println("install " + pluginUrl + " --verbose"); ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl + " --verbose")); assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); 
assertThat(terminal.getTerminalOutput(), hasItem(containsString("Name: fake-plugin"))); assertThat(terminal.getTerminalOutput(), hasItem(containsString("Description: fake desc"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Site: false"))); assertThat(terminal.getTerminalOutput(), hasItem(containsString("Version: 1.0"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("JVM: true"))); assertThatPluginIsListed(pluginName); } @@ -414,7 +406,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl)); assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); @@ -447,7 +438,6 @@ public class PluginManagerTests extends ESIntegTestCase { "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), "isolated", "false", - "jvm", "true", "classname", "FakePlugin"); // install @@ -465,63 +455,20 @@ public class PluginManagerTests extends ESIntegTestCase { assertTrue(foundExpectedMessage); } - public void testInstallSitePluginVerbose() throws IOException { - String pluginName = "fake-plugin"; - Path pluginDir = createTempDir().resolve(pluginName); - Files.createDirectories(pluginDir.resolve("_site")); - Files.createFile(pluginDir.resolve("_site").resolve("somefile")); - String pluginUrl = createPlugin(pluginDir, - "description", "fake desc", - "name", pluginName, - "version", "1.0", - "site", "true"); - ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl + " --verbose")); - assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); - assertThat(terminal.getTerminalOutput(), 
hasItem(containsString("Name: fake-plugin"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Description: fake desc"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Site: true"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("Version: 1.0"))); - assertThat(terminal.getTerminalOutput(), hasItem(containsString("JVM: false"))); - assertThatPluginIsListed(pluginName); - // We want to check that Plugin Manager moves content to _site - assertFileExists(environment.pluginsFile().resolve(pluginName).resolve("_site")); - } - - public void testInstallSitePlugin() throws IOException { - String pluginName = "fake-plugin"; - Path pluginDir = createTempDir().resolve(pluginName); - Files.createDirectories(pluginDir.resolve("_site")); - Files.createFile(pluginDir.resolve("_site").resolve("somefile")); - String pluginUrl = createPlugin(pluginDir, - "description", "fake desc", - "name", pluginName, - "version", "1.0", - "site", "true"); - ExitStatus status = new PluginManagerCliParser(terminal).execute(args("install " + pluginUrl)); - assertThat("Terminal output was: " + terminal.getTerminalOutput(), status, is(ExitStatus.OK)); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Name: fake-plugin")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Description:")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Site:")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("Version:")))); - assertThat(terminal.getTerminalOutput(), not(hasItem(containsString("JVM:")))); - assertThatPluginIsListed(pluginName); - // We want to check that Plugin Manager moves content to _site - assertFileExists(environment.pluginsFile().resolve(pluginName).resolve("_site")); - } - public void testInstallPluginWithBadChecksum() throws IOException { String pluginName = "fake-plugin"; Path pluginDir = createTempDir().resolve(pluginName); - 
Files.createDirectories(pluginDir.resolve("_site")); - Files.createFile(pluginDir.resolve("_site").resolve("somefile")); String pluginUrl = createPluginWithBadChecksum(pluginDir, - "description", "fake desc", - "version", "1.0", - "site", "true"); + "description", "fake desc", + "name", pluginName, + "version", "1.0", + "elasticsearch.version", Version.CURRENT.toString(), + "java.version", System.getProperty("java.specification.version"), + "classname", "FakePlugin"); assertStatus(String.format(Locale.ROOT, "install %s --verbose", pluginUrl), ExitStatus.IO_ERROR); assertThatPluginIsNotListed(pluginName); - assertFileNotExists(environment.pluginsFile().resolve(pluginName).resolve("_site")); + assertFileNotExists(environment.pluginsFile().resolve(pluginName)); } private void singlePluginInstallAndRemove(String pluginDescriptor, String pluginName, String pluginCoordinates) throws IOException { @@ -606,7 +553,6 @@ public class PluginManagerTests extends ESIntegTestCase { "version", "1.0.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), - "jvm", "true", "classname", "FakePlugin"); // We want to remove plugin with plugin short name @@ -750,7 +696,7 @@ public class PluginManagerTests extends ESIntegTestCase { private Environment buildInitialSettings() throws IOException { Settings settings = settingsBuilder() .put("http.enabled", true) - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); return InternalSettingsPreparer.prepareEnvironment(settings, null); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java index 266b44ebfbd..49edcc7b1d4 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java +++ 
b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/PluginManagerUnitTests.java @@ -56,8 +56,8 @@ public class PluginManagerUnitTests extends ESTestCase { Path genericConfigFolder = createTempDir(); Settings settings = settingsBuilder() - .put("path.conf", genericConfigFolder) - .put("path.home", homeFolder) + .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder) + .put(Environment.PATH_HOME_SETTING.getKey(), homeFolder) .build(); Environment environment = new Environment(settings); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 1ad972e10ef..24560aa8b1c 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; @@ -56,21 +57,21 @@ public class TribeUnitTests extends ESTestCase { Settings baseSettings = Settings.builder() .put("http.enabled", false) .put("node.mode", NODE_MODE) - .put("path.home", createTempDir()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); tribe1 = new TribeClientNode( Settings.builder() .put(baseSettings) .put("cluster.name", "tribe1") .put("name", "tribe1_node") - .put(DiscoveryService.SETTING_DISCOVERY_SEED, random().nextLong()) + .put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); tribe2 = new TribeClientNode( Settings.builder() .put(baseSettings) .put("cluster.name", "tribe2") .put("name", "tribe2_node") - 
.put(DiscoveryService.SETTING_DISCOVERY_SEED, random().nextLong()) + .put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); } @@ -102,7 +103,11 @@ public class TribeUnitTests extends ESTestCase { public void testThatTribeClientsIgnoreGlobalConfig() throws Exception { Path pathConf = getDataPath("elasticsearch.yml").getParent(); - Settings settings = Settings.builder().put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true).put("path.conf", pathConf).build(); + Settings settings = Settings + .builder() + .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) + .put(Environment.PATH_CONF_SETTING.getKey(), pathConf) + .build(); assertTribeNodeSuccesfullyCreated(settings); } @@ -111,7 +116,7 @@ public class TribeUnitTests extends ESTestCase { //they can find their corresponding tribes using the proper transport Settings settings = Settings.builder().put("http.enabled", false).put("node.name", "tribe_node") .put("tribe.t1.node.mode", NODE_MODE).put("tribe.t2.node.mode", NODE_MODE) - .put("path.home", createTempDir()).put(extraSettings).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).put(extraSettings).build(); try (Node node = new Node(settings).start()) { try (Client client = node.client()) { diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index cddea9fd774..3fea66459c2 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import 
org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.junit.After; import org.junit.AfterClass; @@ -67,11 +68,6 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER = "tests.cluster"; - /** - * Defaults to localhost:9300 - */ - public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300"; - protected static final ESLogger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName()); private static final AtomicInteger counter = new AtomicInteger(); @@ -84,7 +80,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { .put("name", "qa_smoke_client_" + counter.getAndIncrement()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. .put("client.transport.ignore_cluster_name", true) - .put("path.home", tempDir) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put("node.mode", "network").build(); // we require network here! TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); @@ -131,11 +127,10 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { } @BeforeClass - public static void initializeSettings() throws UnknownHostException { + public static void initializeSettings() { clusterAddresses = System.getProperty(TESTS_CLUSTER); if (clusterAddresses == null || clusterAddresses.isEmpty()) { - clusterAddresses = TESTS_CLUSTER_DEFAULT; - logger.info("[{}] not set. 
Falling back to [{}]", TESTS_CLUSTER, TESTS_CLUSTER_DEFAULT); + fail("Must specify " + TESTS_CLUSTER + " for smoke client test"); } } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash index 54978b39605..da5790d69c8 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/plugin_test_cases.bash @@ -263,12 +263,6 @@ fi install_and_check_plugin repository s3 aws-java-sdk-core-*.jar } -@test "[$GROUP] install site example" { - # Doesn't use install_and_check_plugin because this is a site plugin - install_plugin site-example $(readlink -m site-example-*.zip) - assert_file_exist "$ESHOME/plugins/site-example/_site/index.html" -} - @test "[$GROUP] install store-smb plugin" { install_and_check_plugin store smb } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json index 7f13d47fc06..baba4e3436a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.get_settings.json @@ -18,6 +18,11 @@ "timeout": { "type" : "time", "description" : "Explicit operation timeout" + }, + "include_defaults": { + "type": "boolean", + "description": "Whether to return all default cluster settings.", + "default": false + } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index 2c0c59f6898..ed96200687c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -44,6 +44,11 @@ "type": "boolean", "description": "Whether to return version and creation date values in human-readable format.", "default": false
+ }, + "include_defaults": { + "type": "boolean", + "description": "Whether to return all default settings for each of the indices.", + "default": false + } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json index 1e35c272fa0..17a1cd03118 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json @@ -42,6 +42,11 @@ "type": "boolean", "description": "Whether to return version and creation date values in human-readable format.", "default": false + }, + "include_defaults": { + "type": "boolean", + "description": "Whether to return all default settings for each of the indices.", + "default": false } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml index bf974c85703..86f2a360afa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.plugins/10_basic.yaml @@ -10,7 +10,5 @@ name .+ \n component .+ \n version .+ \n - type .+ \n - url .+ \n description .+ \n $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yaml new file mode 100644 index 00000000000..6c93dabeec7 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/11_reset.yaml @@ -0,0 +1,33 @@ +--- +setup: + - do: + indices.create: + body: + settings: + index: + refresh_interval: 10s + index: test-index +--- +Test reset index settings: + - do: + indices.get_settings: + flat_settings: true + index: test-index + - match: + test-index.settings.index\.refresh_interval: "10s" + - do: + 
indices.put_settings: + body: + refresh_interval: null + - do: + indices.get_settings: + flat_settings: false + - is_false: + test-index.settings.index\.refresh_interval + - do: + indices.get_settings: + include_defaults: true + flat_settings: true + index: test-index + - match: + test-index.defaults.index\.refresh_interval: "1s" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml index 78da54ef8fd..a1367f8c7d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/termvectors/20_issue7121.yaml @@ -5,7 +5,7 @@ body: settings: index: - translog.disable_flush: true + translog.flush_threshold_size: "512MB" number_of_shards: 1 number_of_replicas: 0 refresh_interval: -1 diff --git a/settings.gradle b/settings.gradle index 55126b3c808..682bfec96d9 100644 --- a/settings.gradle +++ b/settings.gradle @@ -34,7 +34,6 @@ List projects = [ 'plugins:repository-hdfs', 'plugins:repository-s3', 'plugins:jvm-example', - 'plugins:site-example', 'plugins:store-smb', 'qa:evil-tests', 'qa:smoke-test-client', diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index b2ce5ebd86e..60cde5a5194 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -154,11 +154,9 @@ public class BootstrapForTesting { try (InputStream stream = url.openStream()) { properties.load(stream); } - if (Boolean.parseBoolean(properties.getProperty("jvm"))) { - String clazz = properties.getProperty("classname"); - if (clazz != null) { - Class.forName(clazz); - } + String clazz = properties.getProperty("classname"); + if (clazz != null) { + 
Class.forName(clazz); } } } catch (Exception e) { diff --git a/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java index 99cd417133d..80b2c3279bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java +++ b/test/framework/src/main/java/org/elasticsearch/cache/recycler/MockPageCacheRecycler.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.recycler.Recycler.V; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.threadpool.ThreadPool; import java.lang.reflect.Array; @@ -63,8 +62,10 @@ public class MockPageCacheRecycler extends PageCacheRecycler { @Inject public MockPageCacheRecycler(Settings settings, ThreadPool threadPool) { super(settings, threadPool); - final long seed = settings.getAsLong(InternalTestCluster.SETTING_CLUSTER_NODE_SEED, 0L); - random = new Random(seed); + // we always initialize with 0 here since we really only want to have some random bytes / ints / longs + // and given that it's called concurrently it won't reproduce the same order anyway, other than in a unit test + // for the latter 0 is just fine + random = new Random(0); } private V wrap(final V v) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index 1c110bc405a..a9b45a5b336 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -45,7 +45,7 @@ public class MapperTestUtils { public static MapperService newMapperService(Path tempDir, Settings settings, IndicesModule indicesModule) throws IOException { Settings.Builder 
settingsBuilder = Settings.builder() - .put("path.home", tempDir) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(settings); if (settings.get(IndexMetaData.SETTING_VERSION_CREATED) == null) { settingsBuilder.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT); diff --git a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java index db63d137f32..3c72701f214 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java @@ -22,8 +22,10 @@ import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.engine.MockEngineFactory; +import org.elasticsearch.test.engine.MockEngineSupport; import java.util.Collection; import java.util.Collections; @@ -41,6 +43,11 @@ public class MockEngineFactoryPlugin extends Plugin { private Class readerWrapper = AssertingDirectoryReader.class; + public void onModule(SettingsModule module) { + module.registerSetting(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE); + module.registerSetting(MockEngineSupport.WRAP_READER_RATIO); + } + @Override public void onIndexModule(IndexModule module) { module.engineFactory.set(new MockEngineFactory(readerWrapper)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java new file mode 100644 index 00000000000..a51c3f9eb40 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/ActionRecordingPlugin.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilter; +import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +import static java.util.Collections.unmodifiableList; + +/** + * Plugin that registers a filter that records actions. + */ +public class ActionRecordingPlugin extends Plugin { + /** + * Fetch all the requests recorded by the test plugin. The list is an + * immutable, moment in time snapshot. 
+ */ + public static List> allRequests() { + List> requests = new ArrayList<>(); + for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { + requests.addAll(filter.requests); + } + return unmodifiableList(requests); + } + + /** + * Fetch all requests recorded by the test plugin of a certain type. The + * list is an immutable, moment in time snapshot. + */ + public static List requestsOfType(Class type) { + List requests = new ArrayList<>(); + for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { + for (ActionRequest request : filter.requests) { + if (type.isInstance(request)) { + requests.add(type.cast(request)); + } + } + } + return unmodifiableList(requests); + } + + /** + * Clear all the recorded requests. Use between test methods that shared a + * suite scoped cluster. + */ + public static void clear() { + for (RecordingFilter filter : ESIntegTestCase.internalCluster().getInstances(RecordingFilter.class)) { + filter.requests.clear(); + } + } + + @Override + public String name() { + return "test-action-logging"; + } + + @Override + public String description() { + return "Test action logging"; + } + + @Override + public Collection nodeModules() { + return Collections.singletonList(new ActionRecordingModule()); + } + + public void onModule(ActionModule module) { + module.registerFilter(RecordingFilter.class); + } + + public static class ActionRecordingModule extends AbstractModule { + @Override + protected void configure() { + bind(RecordingFilter.class).asEagerSingleton(); + } + + } + + public static class RecordingFilter extends ActionFilter.Simple { + private final List> requests = new CopyOnWriteArrayList<>(); + + @Inject + public RecordingFilter(Settings settings) { + super(settings); + } + + public List> getRequests() { + return new ArrayList<>(requests); + } + + @Override + public int order() { + return 999; + } + + @Override + protected boolean apply(String 
action, ActionRequest request, ActionListener listener) { + requests.add(request); + return true; + } + + @Override + protected boolean apply(String action, ActionResponse response, ActionListener listener) { + return true; + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 3124fc9f8bc..ca11fa242d5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -75,8 +75,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -159,6 +162,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BooleanSupplier; +import java.util.function.Function; import static org.elasticsearch.client.Requests.syncedFlushRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; @@ -266,7 +270,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * The value of this seed can be used to initialize a random context for a specific index. * It's set once per test via a generic index template. 
*/ - public static final String SETTING_INDEX_SEED = "index.tests.seed"; + public static final Setting INDEX_TEST_SEED_SETTING = Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, false, Setting.Scope.INDEX); /** * A boolean value to enable or disable mock modules. This is useful to test the @@ -365,8 +369,11 @@ public abstract class ESIntegTestCase extends ESTestCase { // TODO move settings for random directory etc here into the index based randomized settings. if (cluster().size() > 0) { Settings.Builder randomSettingsBuilder = - setRandomIndexSettings(getRandom(), Settings.builder()) - .put(SETTING_INDEX_SEED, getRandom().nextLong()); + setRandomIndexSettings(getRandom(), Settings.builder()); + if (isInternalCluster()) { + // this is only used by mock plugins and if the cluster is not internal we just can't set it + randomSettingsBuilder.put(INDEX_TEST_SEED_SETTING.getKey(), getRandom().nextLong()); + } randomSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards()) .put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas()); @@ -465,20 +472,20 @@ public abstract class ESIntegTestCase extends ESTestCase { setRandomIndexNormsLoading(random, builder); if (random.nextBoolean()) { - builder.put(MergeSchedulerConfig.AUTO_THROTTLE, false); + builder.put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), false); } if (random.nextBoolean()) { - builder.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED, random.nextBoolean()); + builder.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), random.nextBoolean()); } if (random.nextBoolean()) { - builder.put("index.shard.check_on_startup", randomFrom(random, "false", "checksum", "true")); + builder.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "checksum", "true")); } if (randomBoolean()) { // keep this low so we don't stall tests - builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, RandomInts.randomIntBetween(random, 1, 15) + "ms"); + 
builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), RandomInts.randomIntBetween(random, 1, 15) + "ms"); } return builder; @@ -486,15 +493,15 @@ public abstract class ESIntegTestCase extends ESTestCase { private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT, + builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), random.nextBoolean() ? random.nextDouble() : random.nextBoolean()); } switch (random.nextInt(4)) { case 3: final int maxThreadCount = RandomInts.randomIntBetween(random, 1, 4); final int maxMergeCount = RandomInts.randomIntBetween(random, maxThreadCount, maxThreadCount + 4); - builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT, maxMergeCount); - builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT, maxThreadCount); + builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), maxMergeCount); + builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), maxThreadCount); break; } @@ -503,27 +510,27 @@ public abstract class ESIntegTestCase extends ESTestCase { private static Settings.Builder setRandomIndexNormsLoading(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random, Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY))); + builder.put(SearchService.INDEX_NORMS_LOADING_SETTING.getKey(), RandomPicks.randomFrom(random, Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY))); } return builder; } private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); + 
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); } if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE, new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush + builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { - builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY, RandomPicks.randomFrom(random, Translog.Durability.values())); + builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), RandomPicks.randomFrom(random, Translog.Durability.values())); } if (random.nextBoolean()) { if (rarely(random)) { - builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), 0); // 0 has special meaning to sync each op } else { - builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL, RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); + builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); } } @@ -1283,7 +1290,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected final void enableAllocation(String... indices) { client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put( - EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "all" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all" )).get(); } @@ -1292,7 +1299,7 @@ public abstract class ESIntegTestCase extends ESTestCase { */ protected final void disableAllocation(String... 
indices) { client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put( - EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE, "none" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none" )).get(); } @@ -1678,7 +1685,7 @@ public abstract class ESIntegTestCase extends ESTestCase { .put("script.indexed", "on") .put("script.inline", "on") // wait short time for other active shards before actually deleting, default 30s not needed in tests - .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT, new TimeValue(1, TimeUnit.SECONDS)); + .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(1, TimeUnit.SECONDS)); return builder.build(); } @@ -1752,7 +1759,7 @@ public abstract class ESIntegTestCase extends ESTestCase { NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(Node.HTTP_ENABLED, false). + return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false). put(ESIntegTestCase.this.nodeSettings(nodeOrdinal)).build(); } @@ -1796,7 +1803,16 @@ public abstract class ESIntegTestCase extends ESTestCase { return new InternalTestCluster(nodeMode, seed, createTempDir(), minNumDataNodes, maxNumDataNodes, InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), - InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins); + InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins, getClientWrapper()); + } + + /** + * Returns a function that allows to wrap / filter all clients that are exposed by the test cluster. This is useful + * for debugging or request / response pre and post processing. It also allows to intercept all calls done by the test + * framework. By default this method returns an identity function {@link Function#identity()}. 
+ */ + protected Function getClientWrapper() { + return Function.identity(); } /** Return the mock plugins the cluster should use */ @@ -1822,9 +1838,25 @@ public abstract class ESIntegTestCase extends ESTestCase { mocks.add(AssertingLocalTransport.TestPlugin.class); } } + mocks.add(TestSeedPlugin.class); return Collections.unmodifiableList(mocks); } + public static final class TestSeedPlugin extends Plugin { + @Override + public String name() { + return "test-seed-plugin"; + } + @Override + public String description() { + return "a test plugin that registeres index.tests.seed as an index setting"; + } + public void onModule(SettingsModule module) { + module.registerSetting(INDEX_TEST_SEED_SETTING); + } + + } + /** * Returns the client ratio configured via */ @@ -2059,11 +2091,11 @@ public abstract class ESIntegTestCase extends ESTestCase { assertTrue(Files.exists(dest)); Settings.Builder builder = Settings.builder() .put(settings) - .put("path.data", dataDir.toAbsolutePath()); + .put(Environment.PATH_DATA_SETTING.getKey(), dataDir.toAbsolutePath()); Path configDir = indexDir.resolve("config"); if (Files.exists(configDir)) { - builder.put("path.conf", configDir.toAbsolutePath()); + builder.put(Environment.PATH_CONF_SETTING.getKey(), configDir.toAbsolutePath()); } return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 9b06bae21b0..f73839c5cec 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.env.Environment; import 
org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.MockNode; @@ -158,10 +159,10 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private Node newNode() { Settings settings = Settings.builder() .put(ClusterName.SETTING, InternalTestCluster.clusterName("single-node-cluster", randomLong())) - .put("path.home", createTempDir()) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) // TODO: use a consistent data path for custom paths // This needs to tie into the ESIntegTestCase#indexSettings() method - .put("path.shared_data", createTempDir().getParent()) + .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir().getParent()) .put("node.name", nodeName()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 3777653297e..598b6216ce2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -531,8 +531,8 @@ public abstract class ESTestCase extends LuceneTestCase { public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = Settings.builder() .put(settings) - .put("path.home", createTempDir().toAbsolutePath()) - .putArray("path.data", tmpPaths()).build(); + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) + .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); return new NodeEnvironment(build, new Environment(build)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 05f194fc26a..5d169fc6acd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -53,7 +53,7 @@ final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) - .put(DiscoveryModule.DISCOVERY_TYPE_KEY, "zen") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") .put("node.mode", "network").build(); // we need network mode for this private final Path path; @@ -112,7 +112,7 @@ final class ExternalNode implements Closeable { case "node.mode": case "node.local": case NetworkModule.TRANSPORT_TYPE_KEY: - case DiscoveryModule.DISCOVERY_TYPE_KEY: + case "discovery.type": case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY: case InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING: continue; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 34b6bfbfb14..0b3facca05d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; @@ -74,7 +75,7 @@ public final class ExternalTestCluster extends TestCluster { .put("name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) .put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true) // prevents any settings to be replaced by system properties. 
.put("client.transport.ignore_cluster_name", true) - .put("path.home", tempDir) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put("node.mode", "network").build(); // we require network here! TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index 39e1857f412..f4e08979e7c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -21,11 +21,16 @@ package org.elasticsearch.test; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import java.util.Collections; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; public class IndexSettingsModule extends AbstractModule { @@ -43,17 +48,21 @@ public class IndexSettingsModule extends AbstractModule { bind(IndexSettings.class).toInstance(newIndexSettings(index, settings)); } - public static IndexSettings newIndexSettings(String index, Settings settings) { - return newIndexSettings(new Index(index), settings); + public static IndexSettings newIndexSettings(String index, Settings settings, Setting... setting) { + return newIndexSettings(new Index(index), settings, setting); } - public static IndexSettings newIndexSettings(Index index, Settings settings) { + public static IndexSettings newIndexSettings(Index index, Settings settings, Setting... 
setting) { Settings build = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(settings) - .build(); + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(settings) + .build(); IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build(); - return new IndexSettings(metaData, Settings.EMPTY, Collections.emptyList()); + Set> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); + if (setting.length > 0) { + settingSet.addAll(Arrays.asList(setting)); + } + return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex()), new IndexScopedSettings(Settings.EMPTY, settingSet)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java new file mode 100644 index 00000000000..64719f0f9de --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalSettingsPlugin.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.SettingsModule; +import org.elasticsearch.plugins.Plugin; + +public final class InternalSettingsPlugin extends Plugin { + @Override + public String name() { + return "internal-settings-plugin"; + } + + @Override + public String description() { + return "a plugin that allows to set values for internal settings which are can't be set via the ordinary API without this pluging installed"; + } + + public static final Setting VERSION_CREATED = Setting.intSetting("index.version.created", 0, false, Setting.Scope.INDEX); + public static final Setting MERGE_ENABLED = Setting.boolSetting("index.merge.enabled", true, false, Setting.Scope.INDEX); + public static final Setting INDEX_CREATION_DATE_SETTING = Setting.longSetting(IndexMetaData.SETTING_CREATION_DATE, -1, -1, false, Setting.Scope.INDEX); + + public void onModule(SettingsModule module) { + module.registerSetting(VERSION_CREATED); + module.registerSetting(MERGE_ENABLED); + module.registerSetting(INDEX_CREATION_DATE_SETTING); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index c0946ccefca..9fb3eec8489 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -62,6 +62,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.discovery.DiscoveryService; +import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import 
org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexModule; @@ -114,6 +115,7 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -144,13 +146,6 @@ public final class InternalTestCluster extends TestCluster { private final ESLogger logger = Loggers.getLogger(getClass()); - static NodeConfigurationSource DEFAULT_SETTINGS_SOURCE = NodeConfigurationSource.EMPTY; - - /** - * A node level setting that holds a per node random seed that is consistent across node restarts - */ - public static final String SETTING_CLUSTER_NODE_SEED = "test.cluster.node.seed"; - /** * The number of ports in the range used for this JVM */ @@ -221,14 +216,16 @@ public final class InternalTestCluster extends TestCluster { private ServiceDisruptionScheme activeDisruptionScheme; private String nodeMode; + private Function clientWrapper; public InternalTestCluster(String nodeMode, long clusterSeed, Path baseDir, int minNumDataNodes, int maxNumDataNodes, String clusterName, NodeConfigurationSource nodeConfigurationSource, int numClientNodes, - boolean enableHttpPipelining, String nodePrefix, Collection> mockPlugins) { + boolean enableHttpPipelining, String nodePrefix, Collection> mockPlugins, Function clientWrapper) { super(clusterSeed); if ("network".equals(nodeMode) == false && "local".equals(nodeMode) == false) { throw new IllegalArgumentException("Unknown nodeMode: " + nodeMode); } + this.clientWrapper = clientWrapper; this.nodeMode = nodeMode; this.baseDir = baseDir; this.clusterName = clusterName; @@ -285,12 +282,12 @@ public final class InternalTestCluster extends TestCluster { for (int i = 0; i < numOfDataPaths; i++) { dataPath.append(baseDir.resolve("d" + i).toAbsolutePath()).append(','); } - 
builder.put("path.data", dataPath.toString()); + builder.put(Environment.PATH_DATA_SETTING.getKey(), dataPath.toString()); } } - builder.put("path.shared_data", baseDir.resolve("custom")); - builder.put("path.home", baseDir); - builder.put("path.repo", baseDir.resolve("repos")); + builder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), baseDir.resolve("custom")); + builder.put(Environment.PATH_HOME_SETTING.getKey(), baseDir); + builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put("transport.tcp.port", TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); builder.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true); @@ -379,8 +376,7 @@ public final class InternalTestCluster extends TestCluster { private Settings getRandomNodeSettings(long seed) { Random random = new Random(seed); - Builder builder = Settings.settingsBuilder() - .put(SETTING_CLUSTER_NODE_SEED, seed); + Builder builder = Settings.settingsBuilder(); if (isLocalTransportConfigured() == false) { builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); } @@ -388,12 +384,12 @@ public final class InternalTestCluster extends TestCluster { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); } if (random.nextInt(10) == 0) { // 10% of the nodes have a very frequent check interval - builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueMillis(10 + random.nextInt(2000))); + builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(10 + random.nextInt(2000))); } else if (random.nextInt(10) != 0) { // 90% of the time - 10% of the time we don't set anything - builder.put(SearchService.KEEPALIVE_INTERVAL_KEY, TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60))); + builder.put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), 
TimeValue.timeValueSeconds(10 + random.nextInt(5 * 60))); } if (random.nextBoolean()) { // sometimes set a - builder.put(SearchService.DEFAULT_KEEPALIVE_KEY, TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); + builder.put(SearchService.DEFAULT_KEEPALIVE_SETTING.getKey(), TimeValue.timeValueSeconds(100 + random.nextInt(5 * 60))); } if (random.nextInt(10) == 0) { @@ -425,11 +421,11 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(IndexModule.QUERY_CACHE_TYPE, random.nextBoolean() ? IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE); + builder.put(IndexModule.INDEX_QUERY_CACHE_TYPE_SETTING.getKey(), random.nextBoolean() ? IndexModule.INDEX_QUERY_CACHE : IndexModule.NONE_QUERY_CACHE); } if (random.nextBoolean()) { - builder.put(IndexModule.QUERY_CACHE_EVERYTHING, random.nextBoolean()); + builder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), random.nextBoolean()); } if (random.nextBoolean()) { @@ -456,14 +452,14 @@ public final class InternalTestCluster extends TestCluster { } if (random.nextBoolean()) { - builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, RandomInts.randomIntBetween(random, -100, 2000)); + builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING.getKey(), RandomInts.randomIntBetween(random, 0, 2000)); } if (random.nextBoolean()) { builder.put(ScriptService.SCRIPT_CACHE_EXPIRE_SETTING, TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 750, 10000000))); } // always default delayed allocation to 0 to make sure we have tests are not delayed - builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0); + builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0); return builder.build(); } @@ -593,10 +589,10 @@ public final class InternalTestCluster extends TestCluster { String name = buildNodeName(nodeId); assert !nodes.containsKey(name); Settings finalSettings = settingsBuilder() - .put("path.home", baseDir) // allow 
overriding path.home + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) // allow overriding path.home .put(settings) .put("name", name) - .put(DiscoveryService.SETTING_DISCOVERY_SEED, seed) + .put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), seed) .build(); MockNode node = new MockNode(finalSettings, version, plugins); return new NodeAndClient(name, node); @@ -677,7 +673,7 @@ public final class InternalTestCluster extends TestCluster { Builder builder = settingsBuilder().put(settings).put("node.client", true); if (size() == 0) { // if we are the first node - don't wait for a state - builder.put("discovery.initial_state_timeout", 0); + builder.put(DiscoveryService.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); } String name = startNode(builder); return nodes.get(name).nodeClient(); @@ -798,20 +794,20 @@ public final class InternalTestCluster extends TestCluster { } private Client getOrBuildNodeClient() { - if (nodeClient != null) { - return nodeClient; + if (nodeClient == null) { + nodeClient = node.client(); } - return nodeClient = node.client(); + return clientWrapper.apply(nodeClient); } private Client getOrBuildTransportClient() { - if (transportClient != null) { - return transportClient; + if (transportClient == null) { + /* no sniff client for now - doesn't work will all tests since it might throw NoNodeAvailableException if nodes are shut down. + * we first need support of transportClientRatio as annotations or so + */ + transportClient = new TransportClientFactory(false, nodeConfigurationSource.transportClientSettings(), baseDir, nodeMode, nodeConfigurationSource.transportClientPlugins()).client(node, clusterName); } - /* no sniff client for now - doesn't work will all tests since it might throw NoNodeAvailableException if nodes are shut down. 
- * we first need support of transportClientRatio as annotations or so - */ - return transportClient = new TransportClientFactory(false, nodeConfigurationSource.transportClientSettings(), baseDir, nodeMode, nodeConfigurationSource.transportClientPlugins()).client(node, clusterName); + return clientWrapper.apply(transportClient); } void resetClient() throws IOException { @@ -843,8 +839,8 @@ public final class InternalTestCluster extends TestCluster { IOUtils.rm(nodeEnv.nodeDataPaths()); } } - final long newIdSeed = node.settings().getAsLong(DiscoveryService.SETTING_DISCOVERY_SEED, 0l) + 1; // use a new seed to make sure we have new node id - Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryService.SETTING_DISCOVERY_SEED, newIdSeed).build(); + final long newIdSeed = DiscoveryService.DISCOVERY_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id + Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryService.DISCOVERY_SEED_SETTING.getKey(), newIdSeed).build(); Collection> plugins = node.getPlugins(); Version version = node.getVersion(); node = new MockNode(finalSettings, version, plugins); @@ -889,7 +885,7 @@ public final class InternalTestCluster extends TestCluster { Settings nodeSettings = node.settings(); Builder builder = settingsBuilder() .put("client.transport.nodes_sampler_interval", "1s") - .put("path.home", baseDir) + .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) .put("name", TRANSPORT_CLIENT_PREFIX + node.settings().get("name")) .put(ClusterName.SETTING, clusterName).put("client.transport.sniff", sniff) .put("node.mode", nodeSettings.get("node.mode", nodeMode)) diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java index c02c1d8503f..13f533a583e 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java @@ -21,7 +21,9 @@ package org.elasticsearch.test; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; @@ -58,6 +60,14 @@ public final class MockIndexEventListener { return "a mock index listener for testing only"; } + /** + * For tests to pass in to fail on listener invocation + */ + public static final Setting INDEX_FAIL = Setting.boolSetting("index.fail", false, false, Setting.Scope.INDEX); + public void onModule(SettingsModule module) { + module.registerSetting(INDEX_FAIL); + } + @Override public void onIndexModule(IndexModule module) { module.addIndexEventListener(listener); diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index e549c185616..71c1cc24fa7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; @@ -35,7 +36,7 @@ import 
java.util.Set; public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { - static Settings DEFAULT_NODE_SETTINGS = Settings.settingsBuilder().put("discovery.type", "zen").build(); + static Settings DEFAULT_NODE_SETTINGS = Settings.settingsBuilder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen").build(); private static final String IP_ADDR = "127.0.0.1"; final int numOfNodes; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index 50d75eb3716..ddccfe88e38 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -95,7 +95,7 @@ public final class MockEngineSupport { shardId = config.getShardId(); filterCache = config.getQueryCache(); filterCachingPolicy = config.getQueryCachingPolicy(); - final long seed = settings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); + final long seed = config.getIndexSettings().getValue(ESIntegTestCase.INDEX_TEST_SEED_SETTING); Random random = new Random(seed); final double ratio = WRAP_READER_RATIO.get(settings); boolean wrapReader = random.nextDouble() < ratio; diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 61755f7ecb4..cb3bbc7436b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -731,82 +731,6 @@ public class ElasticsearchAssertions { return response; } - public static void assertNodeContainsPlugins(NodesInfoResponse response, String nodeId, - List expectedJvmPluginNames, - List expectedJvmPluginDescriptions, - List expectedJvmVersions, - List 
expectedSitePluginNames, - List expectedSitePluginDescriptions, - List expectedSiteVersions) { - - Assert.assertThat(response.getNodesMap().get(nodeId), notNullValue()); - - PluginsAndModules plugins = response.getNodesMap().get(nodeId).getPlugins(); - Assert.assertThat(plugins, notNullValue()); - - List pluginNames = filterAndMap(plugins, jvmPluginPredicate, nameFunction); - for (String expectedJvmPluginName : expectedJvmPluginNames) { - Assert.assertThat(pluginNames, hasItem(expectedJvmPluginName)); - } - - List pluginDescriptions = filterAndMap(plugins, jvmPluginPredicate, descriptionFunction); - for (String expectedJvmPluginDescription : expectedJvmPluginDescriptions) { - Assert.assertThat(pluginDescriptions, hasItem(expectedJvmPluginDescription)); - } - - List jvmPluginVersions = filterAndMap(plugins, jvmPluginPredicate, versionFunction); - for (String pluginVersion : expectedJvmVersions) { - Assert.assertThat(jvmPluginVersions, hasItem(pluginVersion)); - } - - boolean anyHaveUrls = - plugins - .getPluginInfos() - .stream() - .filter(jvmPluginPredicate.and(sitePluginPredicate.negate())) - .map(urlFunction) - .anyMatch(p -> p != null); - assertFalse(anyHaveUrls); - - List sitePluginNames = filterAndMap(plugins, sitePluginPredicate, nameFunction); - - Assert.assertThat(sitePluginNames.isEmpty(), is(expectedSitePluginNames.isEmpty())); - for (String expectedSitePluginName : expectedSitePluginNames) { - Assert.assertThat(sitePluginNames, hasItem(expectedSitePluginName)); - } - - List sitePluginDescriptions = filterAndMap(plugins, sitePluginPredicate, descriptionFunction); - Assert.assertThat(sitePluginDescriptions.isEmpty(), is(expectedSitePluginDescriptions.isEmpty())); - for (String sitePluginDescription : expectedSitePluginDescriptions) { - Assert.assertThat(sitePluginDescriptions, hasItem(sitePluginDescription)); - } - - List sitePluginUrls = filterAndMap(plugins, sitePluginPredicate, urlFunction); - Assert.assertThat(sitePluginUrls, 
not(contains(nullValue()))); - - List sitePluginVersions = filterAndMap(plugins, sitePluginPredicate, versionFunction); - Assert.assertThat(sitePluginVersions.isEmpty(), is(expectedSiteVersions.isEmpty())); - for (String pluginVersion : expectedSiteVersions) { - Assert.assertThat(sitePluginVersions, hasItem(pluginVersion)); - } - } - - private static List filterAndMap(PluginsAndModules pluginsInfo, Predicate predicate, Function function) { - return pluginsInfo.getPluginInfos().stream().filter(predicate).map(function).collect(Collectors.toList()); - } - - private static Predicate jvmPluginPredicate = p -> p.isJvm(); - - private static Predicate sitePluginPredicate = p -> p.isSite(); - - private static Function nameFunction = p -> p.getName(); - - private static Function descriptionFunction = p -> p.getDescription(); - - private static Function urlFunction = p -> p.getUrl(); - - private static Function versionFunction = p -> p.getVersion(); - /** * Check if a file exists */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 969d59d885e..1c9bddb6b45 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.internal.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runner.notification.Failure; @@ -39,7 +40,6 @@ import static org.elasticsearch.test.ESIntegTestCase.TESTS_CLUSTER; import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_BLACKLIST; import 
static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SPEC; import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SUITE; -import static org.elasticsearch.test.rest.ESRestTestCase.Rest; /** * A {@link RunListener} that emits to {@link System#err} a string with command @@ -82,7 +82,7 @@ public class ReproduceInfoPrinter extends RunListener { gradleMessageBuilder.appendAllOpts(failure.getDescription()); //Rest tests are a special case as they allow for additional parameters - if (failure.getDescription().getTestClass().isAnnotationPresent(Rest.class)) { + if (ESRestTestCase.class.isAssignableFrom(failure.getDescription().getTestClass())) { gradleMessageBuilder.appendRestTestsProperties(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index b4aecd52a14..5684717342d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -20,20 +20,12 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.carrotsearch.randomizedtesting.annotations.TestGroup; -import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.apache.lucene.util.LuceneTestCase.SuppressFsync; -import org.apache.lucene.util.TimeUnits; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.node.Node; -import org.elasticsearch.repositories.uri.URLRepository; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.test.rest.client.RestException; import org.elasticsearch.test.rest.parser.RestTestParseException; import org.elasticsearch.test.rest.parser.RestTestSuiteParser; @@ -45,17 +37,14 @@ import org.elasticsearch.test.rest.section.TestSection; import org.elasticsearch.test.rest.spec.RestApi; import org.elasticsearch.test.rest.spec.RestSpec; import org.elasticsearch.test.rest.support.FileUtils; +import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import java.io.IOException; import java.io.InputStream; -import java.lang.annotation.ElementType; -import java.lang.annotation.Inherited; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; +import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; @@ -67,6 +56,7 @@ import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -74,27 +64,7 @@ import java.util.Set; /** * Runs the clients test suite against an elasticsearch cluster. */ -@ESRestTestCase.Rest -@SuppressFsync // we aren't trying to test this here, and it can make the test slow -@SuppressCodecs("*") // requires custom completion postings format -@ClusterScope(randomDynamicTemplates = false) -@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // timeout the suite after 40min and fail the test. 
-public abstract class ESRestTestCase extends ESIntegTestCase { - - /** - * Property that allows to control whether the REST tests are run (default) or not - */ - public static final String TESTS_REST = "tests.rest"; - - /** - * Annotation for REST tests - */ - @Inherited - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.TYPE) - @TestGroup(enabled = true, sysProperty = ESRestTestCase.TESTS_REST) - public @interface Rest { - } +public abstract class ESRestTestCase extends ESTestCase { /** * Property that allows to control which REST tests get run. Supports comma separated list of tests @@ -132,7 +102,9 @@ public abstract class ESRestTestCase extends ESIntegTestCase { private static final String PATHS_SEPARATOR = "(? blacklistPathMatchers = new ArrayList<>(); + private final URL[] clusterUrls; private static RestTestExecutionContext restTestExecutionContext; + private static RestTestExecutionContext adminExecutionContext; private final RestTestCandidate testCandidate; @@ -142,6 +114,20 @@ public abstract class ESRestTestCase extends ESIntegTestCase { for (String entry : blacklist) { this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } + String cluster = System.getProperty("tests.rest.cluster"); + if (cluster == null) { + throw new RuntimeException("Must specify tests.rest.cluster for rest tests"); + } + String[] stringUrls = cluster.split(","); + clusterUrls = new URL[stringUrls.length]; + int i = 0; + try { + for (String stringUrl : stringUrls) { + clusterUrls[i++] = new URL("http://" + stringUrl); + } + } catch (IOException e) { + throw new RuntimeException("Failed to parse cluster addresses for rest test", e); + } } @Override @@ -150,28 +136,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { super.afterIfFailed(errors); } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .putArray(URLRepository.ALLOWED_URLS_SETTING, "http://snapshot.test*") - .put(Node.HTTP_ENABLED, 
true) - .put("node.testattr", "test") - .put(super.nodeSettings(nodeOrdinal)).build(); - } - public static Iterable createParameters(int id, int count) throws IOException, RestTestParseException { - TestGroup testGroup = Rest.class.getAnnotation(TestGroup.class); - String sysProperty = TestGroup.Utilities.getSysProperty(Rest.class); - boolean enabled; - try { - enabled = RandomizedTest.systemPropertyAsBoolean(sysProperty, testGroup.enabled()); - } catch (IllegalArgumentException e) { - // Ignore malformed system property, disable the group if malformed though. - enabled = false; - } - if (!enabled) { - return new ArrayList<>(); - } //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system List restTestCandidates = collectTestCandidates(id, count); List objects = new ArrayList<>(); @@ -274,6 +239,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase { } validateSpec(restSpec); restTestExecutionContext = new RestTestExecutionContext(restSpec); + adminExecutionContext = new RestTestExecutionContext(restSpec); } private static void validateSpec(RestSpec restSpec) { @@ -293,27 +259,42 @@ } } + @After + public void wipeCluster() throws Exception { + + // wipe indices + Map deleteIndicesArgs = new HashMap<>(); + deleteIndicesArgs.put("index", "*"); + try { + adminExecutionContext.callApi("indices.delete", deleteIndicesArgs, Collections.emptyList(), Collections.emptyMap()); + } catch (RestException e) { + // 404 here just means we had no indexes + if (e.statusCode() != 404) { + throw e; + } + } + + // wipe index templates + Map deleteTemplatesArgs = new HashMap<>(); + deleteTemplatesArgs.put("name", "*"); + adminExecutionContext.callApi("indices.delete_template", deleteTemplatesArgs, Collections.emptyList(), Collections.emptyMap()); + + // wipe snapshot repositories (snapshot.delete_repository removes the repository registration, not individual snapshots) + Map deleteSnapshotsArgs = new HashMap<>(); + deleteSnapshotsArgs.put("repository", "*"); 
+ adminExecutionContext.callApi("snapshot.delete_repository", deleteSnapshotsArgs, Collections.emptyList(), Collections.emptyMap()); + } + @AfterClass public static void close() { if (restTestExecutionContext != null) { restTestExecutionContext.close(); + adminExecutionContext.close(); restTestExecutionContext = null; + adminExecutionContext = null; } } - @Override - protected int maximumNumberOfShards() { - return 3; // never go crazy in the REST tests - } - - @Override - protected int maximumNumberOfReplicas() { - // hardcoded 1 since this is what clients also do and our tests must expect that we have only node - // with replicas set to 1 ie. the cluster won't be green - return 1; - - } - /** * Used to obtain settings for the REST client that is used to send REST requests. */ @@ -321,15 +302,29 @@ public abstract class ESRestTestCase extends ESIntegTestCase { return Settings.EMPTY; } + /** Returns the REST client settings used for admin actions like cleaning up after the test has completed. */ + protected Settings restAdminSettings() { + return restClientSettings(); // default to the same client settings + } + + /** Returns the addresses the client uses to connect to the test cluster. 
*/ + protected URL[] getClusterUrls() { + return clusterUrls; + } + @Before public void reset() throws IOException, RestException { + // admin context must be available for @After always, regardless of whether the test was blacklisted + adminExecutionContext.initClient(clusterUrls, restAdminSettings()); + adminExecutionContext.clear(); + //skip test if it matches one of the blacklist globs for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher.isSuffixMatch(testPath)); } //The client needs non static info to get initialized, therefore it can't be initialized in the before class - restTestExecutionContext.initClient(cluster().httpAddresses(), restClientSettings()); + restTestExecutionContext.initClient(clusterUrls, restClientSettings()); restTestExecutionContext.clear(); //skip test if the whole suite (yaml file) is disabled diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 4054b8efce1..83860b18bd9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -31,6 +31,7 @@ import org.elasticsearch.test.rest.spec.RestSpec; import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; +import java.net.URL; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -119,9 +120,9 @@ public class RestTestExecutionContext implements Closeable { /** * Creates the embedded REST client when needed. Needs to be called before each test. 
*/ - public void initClient(InetSocketAddress[] addresses, Settings settings) throws IOException, RestException { + public void initClient(URL[] urls, Settings settings) throws IOException, RestException { if (restClient == null) { - restClient = new RestClient(restSpec, settings, addresses); + restClient = new RestClient(restSpec, settings, urls); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index 63a8b397c45..2b6ded9a5f5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; @@ -48,6 +49,7 @@ import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.net.InetSocketAddress; +import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.security.KeyManagementException; @@ -80,18 +82,18 @@ public class RestClient implements Closeable { private final RestSpec restSpec; private final CloseableHttpClient httpClient; private final Headers headers; - private final InetSocketAddress[] addresses; + private final URL[] urls; private final Version esVersion; - public RestClient(RestSpec restSpec, Settings settings, InetSocketAddress[] addresses) throws IOException, RestException { - assert addresses.length > 0; + public RestClient(RestSpec restSpec, Settings settings, URL[] urls) throws IOException, RestException { + assert urls.length > 0; this.restSpec = 
restSpec; this.headers = new Headers(settings); this.protocol = settings.get(PROTOCOL, "http"); this.httpClient = createHttpClient(settings); - this.addresses = addresses; + this.urls = urls; this.esVersion = readAndCheckVersion(); - logger.info("REST client initialized {}, elasticsearch version: [{}]", addresses, esVersion); + logger.info("REST client initialized {}, elasticsearch version: [{}]", urls, esVersion); } private Version readAndCheckVersion() throws IOException, RestException { @@ -102,8 +104,8 @@ public class RestClient implements Closeable { assert restApi.getMethods().size() == 1; String version = null; - for (InetSocketAddress address : addresses) { - RestResponse restResponse = new RestResponse(httpRequestBuilder(address) + for (URL url : urls) { + RestResponse restResponse = new RestResponse(httpRequestBuilder(url) .path(restApi.getPaths().get(0)) .method(restApi.getMethods().get(0)).execute()); checkStatusCode(restResponse); @@ -152,6 +154,8 @@ public class RestClient implements Closeable { HttpRequestBuilder httpRequestBuilder = callApiBuilder(apiName, requestParams, body); for (Map.Entry header : headers.entrySet()) { + logger.trace("Adding header " + header.getKey()); + logger.trace(" with value " + header.getValue()); httpRequestBuilder.addHeader(header.getKey(), header.getValue()); } logger.debug("calling api [{}]", apiName); @@ -246,17 +250,18 @@ return restApi; } - protected HttpRequestBuilder httpRequestBuilder(InetSocketAddress address) { + protected HttpRequestBuilder httpRequestBuilder(URL url) { return new HttpRequestBuilder(httpClient) .addHeaders(headers) .protocol(protocol) - .host(NetworkAddress.formatAddress(address.getAddress())).port(address.getPort()); + .host(url.getHost()) + .port(url.getPort()); } protected HttpRequestBuilder httpRequestBuilder() { //the address used is randomized between the available ones - InetSocketAddress address = RandomizedTest.randomFrom(addresses); -
return httpRequestBuilder(address); + URL url = RandomizedTest.randomFrom(urls); + return httpRequestBuilder(url); } protected CloseableHttpClient createHttpClient(Settings settings) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java index 036310e4e47..b99bf3475da 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java @@ -136,7 +136,7 @@ public class RestTestSuiteParseContext { token = parser.nextToken(); } if (token != XContentParser.Token.FIELD_NAME) { - throw new RestTestParseException("malformed test section: field name expected but found " + token); + throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + parser.getTokenLocation()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java index e8422887ad4..ba1b99288e3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java @@ -18,11 +18,6 @@ */ package org.elasticsearch.test.rest.parser; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import org.elasticsearch.test.rest.section.RestTestSuite; -import org.elasticsearch.test.rest.section.TestSection; - import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; @@ -30,6 +25,11 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; +import 
org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.yaml.YamlXContent; +import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.section.TestSection; + /** * Parser for a complete test suite (yaml file) */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 25c29f0c6c9..ef3be122cdb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -32,11 +32,13 @@ import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.store.StoreRateLimiting; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestRuleMarkFailure; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -57,13 +59,15 @@ import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.Random; +import java.util.Set; public class MockFSDirectoryService extends FsDirectoryService { - public static final String RANDOM_IO_EXCEPTION_RATE_ON_OPEN = "index.store.mock.random.io_exception_rate_on_open"; - public static final String RANDOM_PREVENT_DOUBLE_WRITE = "index.store.mock.random.prevent_double_write"; - public static final String RANDOM_NO_DELETE_OPEN_FILE = "index.store.mock.random.no_delete_open_file"; - public static final String 
CRASH_INDEX = "index.store.mock.random.crash_index"; + public static final Setting RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, false, Setting.Scope.INDEX); + public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = Setting.boolSetting("index.store.mock.random.prevent_double_write", true, false, Setting.Scope.INDEX);// true is default in MDW + public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, false, Setting.Scope.INDEX);// true is default in MDW + public static final Setting CRASH_INDEX_SETTING = Setting.boolSetting("index.store.mock.random.crash_index", true, false, Setting.Scope.INDEX);// true is default in MDW private final FsDirectoryService delegateService; private final Random random; @@ -78,16 +82,16 @@ public class MockFSDirectoryService extends FsDirectoryService { public MockFSDirectoryService(IndexSettings idxSettings, IndexStore indexStore, final ShardPath path) { super(idxSettings, indexStore, path); Settings indexSettings = idxSettings.getSettings(); - final long seed = indexSettings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); + final long seed = idxSettings.getValue(ESIntegTestCase.INDEX_TEST_SEED_SETTING); this.random = new Random(seed); - randomIOExceptionRate = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE, 0.0d); - randomIOExceptionRateOnOpen = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0.0d); - preventDoubleWrite = indexSettings.getAsBoolean(RANDOM_PREVENT_DOUBLE_WRITE, true); // true is default in MDW - noDeleteOpenFile = indexSettings.getAsBoolean(RANDOM_NO_DELETE_OPEN_FILE, random.nextBoolean()); // true is default in MDW + randomIOExceptionRate = 
RANDOM_IO_EXCEPTION_RATE_SETTING.get(indexSettings); + randomIOExceptionRateOnOpen = RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.get(indexSettings); + preventDoubleWrite = RANDOM_PREVENT_DOUBLE_WRITE_SETTING.get(indexSettings); + noDeleteOpenFile = RANDOM_NO_DELETE_OPEN_FILE_SETTING.exists(indexSettings) ? RANDOM_NO_DELETE_OPEN_FILE_SETTING.get(indexSettings) : random.nextBoolean(); random.nextInt(shardId.getId() + 1); // some randomness per shard throttle = MockDirectoryWrapper.Throttling.NEVER; - crashIndex = indexSettings.getAsBoolean(CRASH_INDEX, true); + crashIndex = CRASH_INDEX_SETTING.get(indexSettings); if (logger.isDebugEnabled()) { logger.debug("Using MockDirWrapper with seed [{}] throttle: [{}] crashIndex: [{}]", SeedUtils.formatSeed(seed), @@ -161,8 +165,6 @@ public class MockFSDirectoryService extends FsDirectoryService { return delegateService.throttleTimeInNanos(); } - public static final String RANDOM_IO_EXCEPTION_RATE = "index.store.mock.random.io_exception_rate"; - private Directory wrap(Directory dir) { final ElasticsearchMockDirectoryWrapper w = new ElasticsearchMockDirectoryWrapper(random, dir, this.crashIndex); w.setRandomIOExceptionRate(randomIOExceptionRate); @@ -180,8 +182,8 @@ public class MockFSDirectoryService extends FsDirectoryService { private FsDirectoryService randomDirectorService(IndexStore indexStore, ShardPath path) { final IndexSettings indexSettings = indexStore.getIndexSettings(); - final IndexMetaData build = IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.STORE_TYPE, RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build(); - final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings(), Collections.emptyList()); + final IndexMetaData build = 
IndexMetaData.builder(indexSettings.getIndexMetaData()).settings(Settings.builder().put(indexSettings.getSettings()).put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), RandomPicks.randomFrom(random, IndexModule.Type.values()).getSettingsKey())).build(); + final IndexSettings newIndexSettings = new IndexSettings(build, indexSettings.getNodeSettings()); return new FsDirectoryService(newIndexSettings, indexStore, path); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 3fe700701dd..80251d54951 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -22,7 +22,9 @@ package org.elasticsearch.test.store; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexEventListener; @@ -42,7 +44,7 @@ import java.util.Map; public class MockFSIndexStore extends IndexStore { - public static final String CHECK_INDEX_ON_CLOSE = "index.store.mock.check_index_on_close"; + public static final Setting INDEX_CHECK_INDEX_ON_CLOSE_SETTING = Setting.boolSetting("index.store.mock.check_index_on_close", true, false, Setting.Scope.INDEX); public static class TestPlugin extends Plugin { @Override @@ -55,14 +57,24 @@ public class MockFSIndexStore extends IndexStore { } @Override public Settings additionalSettings() { - return Settings.builder().put(IndexModule.STORE_TYPE, "mock").build(); + return Settings.builder().put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), 
"mock").build(); + } + + public void onModule(SettingsModule module) { + + module.registerSetting(INDEX_CHECK_INDEX_ON_CLOSE_SETTING); + module.registerSetting(MockFSDirectoryService.CRASH_INDEX_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING); + module.registerSetting(MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING); } @Override public void onIndexModule(IndexModule indexModule) { Settings indexSettings = indexModule.getSettings(); - if ("mock".equals(indexSettings.get(IndexModule.STORE_TYPE))) { - if (indexSettings.getAsBoolean(CHECK_INDEX_ON_CLOSE, true)) { + if ("mock".equals(indexSettings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey()))) { + if (INDEX_CHECK_INDEX_ON_CLOSE_SETTING.get(indexSettings)) { indexModule.addIndexEventListener(new Listener()); } indexModule.addIndexStore("mock", MockFSIndexStore::new); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index 8b395003576..d66acb7ff06 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -60,8 +61,10 @@ public class AssertingLocalTransport extends LocalTransport { 
} } - public static final String ASSERTING_TRANSPORT_MIN_VERSION_KEY = "transport.asserting.version.min"; - public static final String ASSERTING_TRANSPORT_MAX_VERSION_KEY = "transport.asserting.version.max"; + public static final Setting ASSERTING_TRANSPORT_MIN_VERSION_KEY = new Setting<>("transport.asserting.version.min", + Version.CURRENT.minimumCompatibilityVersion().toString(), Version::fromString, false, Setting.Scope.CLUSTER); + public static final Setting ASSERTING_TRANSPORT_MAX_VERSION_KEY = new Setting<>("transport.asserting.version.max", + Version.CURRENT.toString(), Version::fromString, false, Setting.Scope.CLUSTER); private final Random random; private final Version minVersion; private final Version maxVersion; @@ -69,10 +72,10 @@ public class AssertingLocalTransport extends LocalTransport { @Inject public AssertingLocalTransport(Settings settings, ThreadPool threadPool, Version version, NamedWriteableRegistry namedWriteableRegistry) { super(settings, threadPool, version, namedWriteableRegistry); - final long seed = settings.getAsLong(ESIntegTestCase.SETTING_INDEX_SEED, 0l); + final long seed = ESIntegTestCase.INDEX_TEST_SEED_SETTING.get(settings); random = new Random(seed); - minVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MIN_VERSION_KEY, Version.V_0_18_0); - maxVersion = settings.getAsVersion(ASSERTING_TRANSPORT_MAX_VERSION_KEY, Version.CURRENT); + minVersion = ASSERTING_TRANSPORT_MIN_VERSION_KEY.get(settings); + maxVersion = ASSERTING_TRANSPORT_MAX_VERSION_KEY.get(settings); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index eefdb996e65..48d79ef064b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -16,9 +16,11 @@ * specific language governing 
permissions and limitations * under the License. */ + package org.elasticsearch.test.transport; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -26,6 +28,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RemoteTransportException; +import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -40,9 +43,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; /** A transport class that doesn't send anything but rather captures all requests for inspection from tests */ public class CapturingTransport implements Transport { + private TransportServiceAdapter adapter; static public class CapturedRequest { @@ -59,6 +65,7 @@ public class CapturingTransport implements Transport { } } + private ConcurrentMap> requests = new ConcurrentHashMap<>(); private BlockingQueue capturedRequests = ConcurrentCollections.newBlockingQueue(); /** returns all requests captured so far. Doesn't clear the captured request list. 
See {@link #clear()} */ @@ -120,14 +127,50 @@ public class CapturingTransport implements Transport { adapter.onResponseReceived(requestId).handleResponse(response); } - /** simulate a remote error for the given requesTId */ - public void handleResponse(final long requestId, final Throwable t) { - adapter.onResponseReceived(requestId).handleException(new RemoteTransportException("remote failure", t)); + /** + * simulate a local error for the given requestId, will be wrapped + * by a {@link SendRequestTransportException} + * + * @param requestId the id corresponding to the captured send + * request + * @param t the failure to wrap + */ + public void handleLocalError(final long requestId, final Throwable t) { + Tuple request = requests.get(requestId); + assert request != null; + this.handleError(requestId, new SendRequestTransportException(request.v1(), request.v2(), t)); } + /** + * simulate a remote error for the given requestId, will be wrapped + * by a {@link RemoteTransportException} + * + * @param requestId the id corresponding to the captured send + * request + * @param t the failure to wrap + */ + public void handleRemoteError(final long requestId, final Throwable t) { + this.handleError(requestId, new RemoteTransportException("remote failure", t)); + } + + /** + * simulate an error for the given requestId, unlike + * {@link #handleLocalError(long, Throwable)} and + * {@link #handleRemoteError(long, Throwable)}, the provided + * exception will not be wrapped but will be delivered to the + * transport layer as is + * + * @param requestId the id corresponding to the captured send + * request + * @param e the failure + */ + public void handleError(final long requestId, final TransportException e) { + adapter.onResponseReceived(requestId).handleException(e); + } @Override public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { + 
requests.put(requestId, Tuple.tuple(node, action)); capturedRequests.add(new CapturedRequest(node, requestId, action, request)); } @@ -149,7 +192,6 @@ public class CapturingTransport implements Transport { @Override public TransportAddress[] addressesFromString(String address, int perAddressLimit) throws Exception { - // WTF return new TransportAddress[0]; } @@ -217,4 +259,5 @@ public class CapturingTransport implements Transport { public List getLocalAddresses() { return Collections.emptyList(); } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 0a8869b20cf..985c8a86838 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -34,7 +34,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.RequestHandlerRegistry; @@ -283,7 +282,7 @@ public class MockTransportService extends TransportService { } // TODO: Replace with proper setting - TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_DEFAULT_CONNECT_TIMEOUT; + TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT.getDefault(Settings.EMPTY); try { if (delay.millis() < connectingTimeout.millis()) { Thread.sleep(delay.millis()); @@ -306,7 +305,7 @@ public class MockTransportService extends TransportService { } // TODO: Replace with proper setting - TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_DEFAULT_CONNECT_TIMEOUT; + 
TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT.getDefault(Settings.EMPTY); try { if (delay.millis() < connectingTimeout.millis()) { Thread.sleep(delay.millis()); diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index c1cfa56c8be..0764ad4ebea 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -34,6 +34,7 @@ import java.util.Iterator; import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; @@ -56,8 +57,8 @@ public class InternalTestClusterTests extends ESTestCase { String nodePrefix = randomRealisticUnicodeOfCodepointLengthBetween(1, 10); Path baseDir = createTempDir(); - InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList()); - InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList()); + InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); + InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), 
Function.identity()); // TODO: this is not ideal - we should have a way to make sure ports are initialized in the same way assertClusters(cluster0, cluster1, false); @@ -114,8 +115,8 @@ public class InternalTestClusterTests extends ESTestCase { String nodePrefix = "foobar"; Path baseDir = createTempDir(); - InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList()); - InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList()); + InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); + InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); assertClusters(cluster0, cluster1, false); long seed = randomLong();