diff --git a/.gitignore b/.gitignore index cd5cdbd1839..31f2aa5fc66 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ work/ logs/ .DS_Store build/ +generated-resources/ **/.local* docs/html/ docs/build.log diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs deleted file mode 100644 index f1163fdd583..00000000000 --- a/.settings/org.eclipse.jdt.core.prefs +++ /dev/null @@ -1,18 +0,0 @@ -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=enabled -org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore -org.eclipse.jdt.core.compiler.annotation.nullable=org.elasticsearch.common.Nullable -org.eclipse.jdt.core.compiler.annotation.nullanalysis=enabled -org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7 -org.eclipse.jdt.core.compiler.compliance=1.7 -org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning -org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning -org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=warning -org.eclipse.jdt.core.compiler.problem.nullReference=warning -org.eclipse.jdt.core.compiler.problem.nullSpecViolation=warning -org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning -org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning -org.eclipse.jdt.core.compiler.source=1.7 -org.eclipse.jdt.core.formatter.lineSplit=140 -org.eclipse.jdt.core.formatter.tabulation.char=space -org.eclipse.jdt.core.formatter.tabulation.size=4 diff --git a/build.gradle b/build.gradle index 621f137eb94..b3e5e09e07a 100644 --- a/build.gradle +++ b/build.gradle @@ -153,7 +153,7 @@ allprojects { apply plugin: 'idea' } -if (hasProperty('projectsPrefix') == false) { +if (projectsPrefix.isEmpty()) { idea { project { languageLevel = sourceCompatibility @@ -178,8 +178,19 @@ allprojects { } } } + task cleanEclipseSettings(type: Delete) { + delete '.settings' + } + task 
copyEclipseSettings(type: Copy) { + // TODO: "package this up" for external builds + from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings') + into '.settings' + } + // otherwise .settings is not nuked entirely + tasks.cleanEclipse.dependsOn(cleanEclipseSettings) // otherwise the eclipse merging is *super confusing* tasks.eclipse.dependsOn(cleanEclipse) + tasks.eclipse.dependsOn(copyEclipseSettings) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index cf9a1ff7302..6c9c3f16d7c 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -84,9 +84,11 @@ class PluginBuildPlugin extends BuildPlugin { static Task configureBundleTask(Project project) { PluginPropertiesTask buildProperties = project.tasks.create(name: 'pluginProperties', type: PluginPropertiesTask) File pluginMetadata = project.file("src/main/plugin-metadata") - project.processTestResources { - from buildProperties - from pluginMetadata + project.sourceSets.test { + output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties') + resources { + srcDir pluginMetadata + } } Task bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) bundle.configure { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy index 202de784f15..be81d8826d8 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy @@ -19,77 +19,66 @@ package org.elasticsearch.gradle.plugin import org.elasticsearch.gradle.ElasticsearchProperties 
-import org.gradle.api.DefaultTask import org.gradle.api.InvalidUserDataException -import org.gradle.api.tasks.OutputFile -import org.gradle.api.tasks.TaskAction +import org.gradle.api.Task +import org.gradle.api.tasks.Copy /** * Creates a plugin descriptor. - * - * TODO: copy the example properties file to plugin documentation */ -class PluginPropertiesTask extends DefaultTask { +class PluginPropertiesTask extends Copy { PluginPropertiesExtension extension - Map properties = new HashMap<>() + File generatedResourcesDir = new File(project.projectDir, 'generated-resources') PluginPropertiesTask() { + File templateFile = new File(project.buildDir, 'templates/plugin-descriptor.properties') + Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') { + doLast { + InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream('/plugin-descriptor.properties') + templateFile.parentFile.mkdirs() + templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8') + } + } + dependsOn(copyPluginPropertiesTemplate) extension = project.extensions.create('esplugin', PluginPropertiesExtension, project) + project.clean { + delete generatedResourcesDir + } project.afterEvaluate { + // check require properties are set if (extension.description == null) { throw new InvalidUserDataException('description is a required setting for esplugin') } if (extension.jvm && extension.classname == null) { throw new InvalidUserDataException('classname is a required setting for esplugin with jvm=true') } - if (extension.jvm) { - dependsOn(project.classes) // so we can check for the classname - } - fillProperties() configure { - inputs.properties(properties) + doFirst { + if (extension.jvm && extension.isolated == false) { + String warning = "WARNING: Disabling plugin isolation in ${project.name} is deprecated and will be removed in the future" + logger.warn("${'=' * warning.length()}\n${warning}\n${'=' * warning.length()}") + } + } + // configure property 
substitution + from templateFile + into generatedResourcesDir + expand(generateSubstitutions()) } } } - @OutputFile - File propertiesFile = new File(project.buildDir, "plugin" + File.separator + "plugin-descriptor.properties") - - void fillProperties() { - // TODO: need to copy the templated plugin-descriptor with a dependent task, since copy requires a file (not uri) - properties = [ + Map generateSubstitutions() { + return [ 'name': extension.name, 'description': extension.description, 'version': extension.version, - 'elasticsearch.version': ElasticsearchProperties.version, + 'elasticsearchVersion': ElasticsearchProperties.version, + 'javaVersion': project.targetCompatibility as String, 'jvm': extension.jvm as String, - 'site': extension.site as String + 'site': extension.site as String, + 'isolated': extension.isolated as String, + 'classname': extension.jvm ? extension.classname : 'NA' ] - if (extension.jvm) { - properties['classname'] = extension.classname - properties['isolated'] = extension.isolated as String - properties['java.version'] = project.targetCompatibility as String - } - } - - @TaskAction - void buildProperties() { - if (extension.jvm) { - File classesDir = project.sourceSets.main.output.classesDir - File classFile = new File(classesDir, extension.classname.replace('.', File.separator) + '.class') - if (classFile.exists() == false) { - throw new InvalidUserDataException('classname ' + extension.classname + ' does not exist') - } - if (extension.isolated == false) { - logger.warn('Disabling isolation is deprecated and will be removed in the future') - } - } - - Properties props = new Properties() - for (Map.Entry prop : properties) { - props.put(prop.getKey(), prop.getValue()) - } - props.store(propertiesFile.newWriter(), null) } } diff --git a/.settings/org.eclipse.core.resources.prefs b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs similarity index 70% rename from .settings/org.eclipse.core.resources.prefs rename 
to buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs index 5731b2f8244..6fd0a9aab13 100644 --- a/.settings/org.eclipse.core.resources.prefs +++ b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs @@ -1,6 +1,6 @@ eclipse.preferences.version=1 encoding//src/main/java=UTF-8 encoding//src/main/resources=UTF-8 +encoding//src/test/java=UTF-8 encoding//src/test/resources=UTF-8 -encoding/=UTF-8 -encoding/rest-api-spec=UTF-8 +encoding/=UTF-8 \ No newline at end of file diff --git a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 00000000000..9bee5e587b0 --- /dev/null +++ b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,22 @@ +eclipse.preferences.version=1 + +# previous configuration from maven build +# this is merged with gradle's generated properties during 'gradle eclipse' + +# NOTE: null pointer analysis etc is not enabled currently, it seems very unstable +# (e.g. 
crashing eclipse etc) +# org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=enabled +# org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore +# org.eclipse.jdt.core.compiler.annotation.nullable=org.elasticsearch.common.Nullable +# org.eclipse.jdt.core.compiler.annotation.nullanalysis=enabled +# org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning +# org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=warning +# org.eclipse.jdt.core.compiler.problem.nullReference=warning +# org.eclipse.jdt.core.compiler.problem.nullSpecViolation=warning +# org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning +# org.eclipse.jdt.core.compiler.problem.potentialNullReference=warning + +org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning +org.eclipse.jdt.core.formatter.lineSplit=140 +org.eclipse.jdt.core.formatter.tabulation.char=space +org.eclipse.jdt.core.formatter.tabulation.size=4 diff --git a/.settings/org.eclipse.jdt.ui.prefs b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.ui.prefs similarity index 81% rename from .settings/org.eclipse.jdt.ui.prefs rename to buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.ui.prefs index 4a9959fc9fc..391a8715868 100644 --- a/.settings/org.eclipse.jdt.ui.prefs +++ b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.ui.prefs @@ -3,4 +3,4 @@ formatter_settings_version=12 # Intellij IDEA import order org.eclipse.jdt.ui.importorder=;com;org;java;javax;\#; # License header -org.eclipse.jdt.ui.text.custom_code_templates= +org.eclipse.jdt.ui.text.custom_code_templates= diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties index 4e5486f482d..4c676c26cad 100644 --- a/buildSrc/src/main/resources/plugin-descriptor.properties +++ b/buildSrc/src/main/resources/plugin-descriptor.properties @@ -31,19 +31,19 @@ ### mandatory elements for 
all plugins: # # 'description': simple summary of the plugin -description=${project.description} +description=${description} # # 'version': plugin's version -version=${project.version} +version=${version} # # 'name': the plugin name -name=${elasticsearch.plugin.name} +name=${name} ### mandatory elements for site plugins: # # 'site': set to true to indicate contents of the _site/ # directory in the root of the plugin should be served. -site=${elasticsearch.plugin.site} +site=${site} # ### mandatory elements for jvm plugins : # @@ -52,19 +52,19 @@ site=${elasticsearch.plugin.site} # Note that only jar files in the root directory are # added to the classpath for the plugin! If you need # other resources, package them into a resources jar. -jvm=${elasticsearch.plugin.jvm} +jvm=${jvm} # # 'classname': the name of the class to load, fully-qualified. -classname=${elasticsearch.plugin.classname} +classname=${classname} # # 'java.version' version of java the code is built against # use the system property java.specification.version # version string must be a sequence of nonnegative decimal integers # separated by "."'s and may have leading zeros -java.version=${java.target.version} +java.version=${javaVersion} # # 'elasticsearch.version' version of elasticsearch compiled against -elasticsearch.version=${elasticsearch.version} +elasticsearch.version=${elasticsearchVersion} # ### deprecated elements for jvm plugins : # @@ -72,5 +72,5 @@ elasticsearch.version=${elasticsearch.version} # passing false is deprecated, and only intended to support plugins # that have hard dependencies against each other. If this is # not specified, then the plugin is isolated by default. 
-isolated=${elasticsearch.plugin.isolated} +isolated=${isolated} # \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java index 68bcf6d269c..40e8b0730ff 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java @@ -38,13 +38,11 @@ public class SearchPhaseExecutionException extends ElasticsearchException { private final ShardSearchFailure[] shardFailures; public SearchPhaseExecutionException(String phaseName, String msg, ShardSearchFailure[] shardFailures) { - super(msg); - this.phaseName = phaseName; - this.shardFailures = shardFailures; + this(phaseName, msg, null, shardFailures); } public SearchPhaseExecutionException(String phaseName, String msg, Throwable cause, ShardSearchFailure[] shardFailures) { - super(msg, cause); + super(msg, deduplicateCause(cause, shardFailures)); this.phaseName = phaseName; this.shardFailures = shardFailures; } @@ -63,12 +61,26 @@ public class SearchPhaseExecutionException extends ElasticsearchException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(phaseName); - out.writeVInt(shardFailures == null ? 
0 : shardFailures.length); - if (shardFailures != null) { + out.writeVInt(shardFailures.length); + for (ShardSearchFailure failure : shardFailures) { + failure.writeTo(out); + } + } + + private static final Throwable deduplicateCause(Throwable cause, ShardSearchFailure[] shardFailures) { + if (shardFailures == null) { + throw new IllegalArgumentException("shardSearchFailures must not be null"); + } + // if the cause of this exception is also the cause of one of the shard failures we don't add it + // to prevent duplication in stack traces rendered to the REST layer + if (cause != null) { for (ShardSearchFailure failure : shardFailures) { - failure.writeTo(out); + if (failure.getCause() == cause) { + return null; + } } } + return cause; } @Override diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java index cd4238ccdea..2a953f9b732 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java @@ -114,7 +114,7 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent public void start() { if (scrollId.getContext().length == 0) { - listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", null)); + listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); return; } @@ -175,7 +175,7 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { if (successfulOps.get() == 0) { - listener.onFailure(new SearchPhaseExecutionException("query_fetch", "all shards failed", buildShardFailures())); + listener.onFailure(new 
SearchPhaseExecutionException("query_fetch", "all shards failed", t, buildShardFailures())); } else { finishHim(); } diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index 85b06ea7860..8f2df714319 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -123,7 +123,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent public void start() { if (scrollId.getContext().length == 0) { - listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", null)); + listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); return; } final AtomicInteger counter = new AtomicInteger(scrollId.getContext().length); @@ -143,7 +143,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent try { executeFetchPhase(); } catch (Throwable e) { - listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, null)); + listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY)); return; } } @@ -181,12 +181,12 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent successfulOps.decrementAndGet(); if (counter.decrementAndGet() == 0) { if (successfulOps.get() == 0) { - listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", buildShardFailures())); + listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", t, buildShardFailures())); } else { try { executeFetchPhase(); } catch (Throwable e) { - listener.onFailure(new SearchPhaseExecutionException("query", "Fetch 
failed", e, null)); + listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY)); } } } diff --git a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index fa5776387dd..31cd3986d2f 100644 --- a/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -220,17 +220,19 @@ public abstract class TransportSearchTypeAction extends TransportAction i public String id() { return id; } - + /** * Sets the id of document the term vector is requested for. */ @@ -651,7 +650,7 @@ public class TermVectorsRequest extends SingleShardRequest i if (e.getValue() instanceof String) { mapStrStr.put(e.getKey(), (String) e.getValue()); } else { - throw new ElasticsearchException("expecting the analyzer at [{}] to be a String, but found [{}] instead", e.getKey(), e.getValue().getClass()); + throw new ElasticsearchParseException("expecting the analyzer at [{}] to be a String, but found [{}] instead", e.getKey(), e.getValue().getClass()); } } return mapStrStr; diff --git a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java index 60a4913bb5e..31f5eb4a921 100644 --- a/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java +++ b/core/src/main/java/org/elasticsearch/cluster/ClusterModule.java @@ -177,7 +177,7 @@ public class ClusterModule extends AbstractModule { registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, Validator.TIME_NON_NEGATIVE); registerClusterDynamicSetting(RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC, Validator.BYTES_SIZE); - 
registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", Validator.EMPTY); + registerClusterDynamicSetting(ThreadPool.THREADPOOL_GROUP + "*", ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR); registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES, Validator.INTEGER); registerClusterDynamicSetting(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, Validator.INTEGER); registerClusterDynamicSetting(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK, Validator.EMPTY); diff --git a/core/src/main/java/org/elasticsearch/http/HttpServer.java b/core/src/main/java/org/elasticsearch/http/HttpServer.java index f3b8c3f0a4b..9971ce7722d 100644 --- a/core/src/main/java/org/elasticsearch/http/HttpServer.java +++ b/core/src/main/java/org/elasticsearch/http/HttpServer.java @@ -189,7 +189,7 @@ public class HttpServer extends AbstractLifecycleComponent { sitePath = null; // If a trailing / is missing, we redirect to the right page #2654 String redirectUrl = request.rawPath() + "/"; - BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", ""); + BytesRestResponse restResponse = new BytesRestResponse(RestStatus.MOVED_PERMANENTLY, "text/html", ""); restResponse.addHeader("Location", redirectUrl); channel.sendResponse(restResponse); return; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index fc47fc561cb..9dc1a1d3dc7 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -355,7 +355,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll, try { clone = (ObjectMapper) super.clone(); } catch (CloneNotSupportedException e) { - throw new RuntimeException(); + throw new 
RuntimeException(e); } return clone; } diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java index 86de4e31129..3ecbd11320e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java @@ -39,7 +39,7 @@ public class CommonTermsQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parser.getTokenLocation(), "[exists] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } if (fieldPattern == null) { - throw new ParsingException(parser.getTokenLocation(), "exists must be provided with a [field]"); + throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] must be provided with a [field]"); } ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern); diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java index e5198952c13..768600c3b38 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java @@ -70,8 +70,10 @@ public class GeoShapeQueryParser implements QueryParser { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { + if (fieldName != null) { + throw new 
ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]"); + } fieldName = currentFieldName; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); @@ -104,10 +106,12 @@ public class GeoShapeQueryParser implements QueryParser { } else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) { shapePath = parser.text(); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } } else { - throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } } @@ -117,7 +121,7 @@ public class GeoShapeQueryParser implements QueryParser { } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } } diff --git a/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java index 0ffd31644e5..0496a690f5f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java @@ -79,7 +79,7 @@ public class IdsQueryParser implements QueryParser { types.add(value); } } else { - throw new ParsingException(parser.getTokenLocation(), "[ids] query does not 
support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } } else if (token.isValue()) { if ("type".equals(currentFieldName) || "_type".equals(currentFieldName)) { @@ -89,12 +89,14 @@ public class IdsQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } if (!idsProvided) { - throw new ParsingException(parser.getTokenLocation(), "[ids] query, no ids values provided"); + throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query, no ids values provided"); } IdsQueryBuilder query = new IdsQueryBuilder(types.toArray(new String[types.size()])); diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java index 770f9c3dd67..6cab98838e0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryParser.java @@ -52,8 +52,10 @@ public class MatchAllQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parser.getTokenLocation(), "[match_all] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } + } else { 
+ throw new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } MatchAllQueryBuilder queryBuilder = new MatchAllQueryBuilder(); diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java index 96b33f545f8..449824e72c9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchNoneQueryParser.java @@ -51,6 +51,8 @@ public class MatchNoneQueryParser implements QueryParser } else { throw new ParsingException(parser.getTokenLocation(), "["+MatchNoneQueryBuilder.NAME+"] query does not support [" + currentFieldName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + MatchNoneQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java index afcf25ca2a7..c50256480a0 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryParser.java @@ -55,7 +55,7 @@ public class MatchQueryParser implements QueryParser { XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { - throw new ParsingException(parser.getTokenLocation(), "[match] query malformed, no field"); + throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query malformed, no field"); } String fieldName = parser.currentName(); @@ -93,7 +93,7 @@ public class MatchQueryParser implements QueryParser { } else if ("phrase_prefix".equals(tStr) || "phrasePrefix".equals(currentFieldName)) { type = MatchQuery.Type.PHRASE_PREFIX; } else { - throw new 
ParsingException(parser.getTokenLocation(), "[match] query does not support type " + tStr); + throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query does not support type " + tStr); } } else if ("analyzer".equals(currentFieldName)) { analyzer = parser.text(); @@ -131,8 +131,10 @@ public class MatchQueryParser implements QueryParser { } else if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else { - throw new ParsingException(parser.getTokenLocation(), "[match] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } parser.nextToken(); diff --git a/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index 8d8c5aec01f..3a696dad38f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -61,8 +61,10 @@ public class MissingQueryParser implements QueryParser { } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { - throw new ParsingException(parser.getTokenLocation(), "[missing] query does not support [" + currentFieldName + "]"); + throw new ParsingException(parser.getTokenLocation(), "[" + MissingQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + MissingQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java 
b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java index c86d0295577..d52f6707aa1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryParser.java @@ -122,8 +122,10 @@ public class MultiMatchQueryParser implements QueryParser 1 - && ((booleanQuery.clauses().iterator().next().getQuery() instanceof BooleanQuery) == false)) { - // special case for one term query and more than one field: (f1:t1 f2:t1 f3:t1) - // we need to wrap this in additional BooleanQuery so minimum_should_match is applied correctly - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new BooleanClause(booleanQuery, Occur.SHOULD)); - booleanQuery = builder.build(); - } - if (minimumShouldMatch != null) { - booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch); - } - query = booleanQuery; + if (minimumShouldMatch != null && query instanceof BooleanQuery) { + query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); } return query; } diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java index c803d440055..ca8b5d10ec6 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java @@ -146,6 +146,8 @@ public class SimpleQueryStringParser implements QueryParser, ToXContent { public TermsLookup(String index, String type, String id, String path) { if (id == null) { - throw new IllegalArgumentException("[terms] query lookup element requires specifying the id."); + throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id."); } if (type == null) { - throw new IllegalArgumentException("[terms] query lookup element requires 
specifying the type."); + throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the type."); } if (path == null) { - throw new IllegalArgumentException("[terms] query lookup element requires specifying the path."); + throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the path."); } this.index = index; this.type = type; @@ -122,9 +123,11 @@ public class TermsLookup implements Writeable, ToXContent { path = parser.text(); break; default: - throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support [" + currentFieldName + throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "] within lookup element"); } + } else { + throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]"); } } return new TermsLookup(index, type, id, path).routing(routing); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java index 311c4eca2cc..fa2e662c738 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java @@ -288,7 +288,7 @@ public class RestThreadPoolAction extends AbstractCatAction { } } - table.addCell(poolInfo == null ? null : poolInfo.getType()); + table.addCell(poolInfo == null ? null : poolInfo.getThreadPoolType().getType()); table.addCell(poolStats == null ? null : poolStats.getActive()); table.addCell(poolStats == null ? null : poolStats.getThreads()); table.addCell(poolStats == null ? 
null : poolStats.getQueue()); diff --git a/core/src/main/java/org/elasticsearch/search/query/FilterBinaryParseElement.java b/core/src/main/java/org/elasticsearch/search/query/FilterBinaryParseElement.java deleted file mode 100644 index 30fbb061a1f..00000000000 --- a/core/src/main/java/org/elasticsearch/search/query/FilterBinaryParseElement.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.query; - -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.internal.SearchContext; - -/** - * - */ -public class FilterBinaryParseElement implements SearchParseElement { - - @Override - public void parse(XContentParser parser, SearchContext context) throws Exception { - byte[] filterSource = parser.binaryValue(); - try (XContentParser fSourceParser = XContentFactory.xContent(filterSource).createParser(filterSource)) { - ParsedQuery filter = context.indexShard().getQueryShardContext().parseInnerFilter(fSourceParser); - if (filter != null) { - context.parsedPostFilter(filter); - } - } - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryBinaryParseElement.java b/core/src/main/java/org/elasticsearch/search/query/QueryBinaryParseElement.java deleted file mode 100644 index af9ea506593..00000000000 --- a/core/src/main/java/org/elasticsearch/search/query/QueryBinaryParseElement.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.query; - -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.internal.SearchContext; - -/** - * - */ -public class QueryBinaryParseElement implements SearchParseElement { - - @Override - public void parse(XContentParser parser, SearchContext context) throws Exception { - byte[] querySource = parser.binaryValue(); - try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) { - context.parsedQuery(context.indexShard().getQueryShardContext().parse(qSourceParser)); - } - } -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 44fae80a020..ce8836cd336 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -90,12 +90,8 @@ public class QueryPhase implements SearchPhase { parseElements.put("indices_boost", new IndicesBoostParseElement()); parseElements.put("indicesBoost", new IndicesBoostParseElement()); parseElements.put("query", new QueryParseElement()); - parseElements.put("queryBinary", new QueryBinaryParseElement()); - parseElements.put("query_binary", new QueryBinaryParseElement()); parseElements.put("post_filter", new PostFilterParseElement()); parseElements.put("postFilter", new PostFilterParseElement()); - parseElements.put("filterBinary", new FilterBinaryParseElement()); - parseElements.put("filter_binary", new FilterBinaryParseElement()); parseElements.put("sort", new SortParseElement()); parseElements.put("trackScores", new TrackScoresParseElement()); parseElements.put("track_scores", new 
TrackScoresParseElement()); diff --git a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 039b46b5c7a..b0d81279b03 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/core/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -20,6 +20,8 @@ package org.elasticsearch.threadpool; import org.apache.lucene.util.Counter; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.settings.Validator; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; @@ -39,22 +41,11 @@ import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.node.settings.NodeSettingsService; import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Queue; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.RejectedExecutionHandler; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.ScheduledThreadPoolExecutor; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; +import java.util.*; +import java.util.concurrent.*; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -86,6 +77,101 @@ public class ThreadPool extends AbstractComponent { public static final String FETCH_SHARD_STORE = "fetch_shard_store"; } + public enum ThreadPoolType { + CACHED("cached"), + 
DIRECT("direct"), + FIXED("fixed"), + SCALING("scaling"); + + private final String type; + + public String getType() { + return type; + } + + ThreadPoolType(String type) { + this.type = type; + } + + private final static Map TYPE_MAP; + + static { + Map typeMap = new HashMap<>(); + for (ThreadPoolType threadPoolType : ThreadPoolType.values()) { + typeMap.put(threadPoolType.getType(), threadPoolType); + } + TYPE_MAP = Collections.unmodifiableMap(typeMap); + } + + public static ThreadPoolType fromType(String type) { + ThreadPoolType threadPoolType = TYPE_MAP.get(type); + if (threadPoolType == null) { + throw new IllegalArgumentException("no ThreadPoolType for " + type); + } + return threadPoolType; + } + } + + public static Map THREAD_POOL_TYPES; + + static { + HashMap map = new HashMap<>(); + map.put(Names.SAME, ThreadPoolType.DIRECT); + map.put(Names.GENERIC, ThreadPoolType.CACHED); + map.put(Names.LISTENER, ThreadPoolType.FIXED); + map.put(Names.GET, ThreadPoolType.FIXED); + map.put(Names.INDEX, ThreadPoolType.FIXED); + map.put(Names.BULK, ThreadPoolType.FIXED); + map.put(Names.SEARCH, ThreadPoolType.FIXED); + map.put(Names.SUGGEST, ThreadPoolType.FIXED); + map.put(Names.PERCOLATE, ThreadPoolType.FIXED); + map.put(Names.MANAGEMENT, ThreadPoolType.SCALING); + map.put(Names.FLUSH, ThreadPoolType.SCALING); + map.put(Names.REFRESH, ThreadPoolType.SCALING); + map.put(Names.WARMER, ThreadPoolType.SCALING); + map.put(Names.SNAPSHOT, ThreadPoolType.SCALING); + map.put(Names.FORCE_MERGE, ThreadPoolType.FIXED); + map.put(Names.FETCH_SHARD_STARTED, ThreadPoolType.SCALING); + map.put(Names.FETCH_SHARD_STORE, ThreadPoolType.SCALING); + THREAD_POOL_TYPES = Collections.unmodifiableMap(map); + } + + private static void add(Map executorSettings, ExecutorSettingsBuilder builder) { + Settings settings = builder.build(); + String name = settings.get("name"); + executorSettings.put(name, settings); + } + + private static class ExecutorSettingsBuilder { + Map settings = new 
HashMap<>(); + + public ExecutorSettingsBuilder(String name) { + settings.put("name", name); + settings.put("type", THREAD_POOL_TYPES.get(name).getType()); + } + + public ExecutorSettingsBuilder size(int availableProcessors) { + return add("size", Integer.toString(availableProcessors)); + } + + public ExecutorSettingsBuilder queueSize(int queueSize) { + return add("queue_size", Integer.toString(queueSize)); + } + + public ExecutorSettingsBuilder keepAlive(String keepAlive) { + return add("keep_alive", keepAlive); + } + + private ExecutorSettingsBuilder add(String key, String value) { + settings.put(key, value); + return this; + } + + public Settings build() { + return settingsBuilder().put(settings).build(); + } + } + public static final String THREADPOOL_GROUP = "threadpool."; private volatile Map executors; @@ -102,7 +188,6 @@ public class ThreadPool extends AbstractComponent { static final Executor DIRECT_EXECUTOR = command -> command.run(); - public ThreadPool(String name) { this(Settings.builder().put("name", name).build()); } @@ -112,42 +197,31 @@ public class ThreadPool extends AbstractComponent { assert settings.get("name") != null : "ThreadPool's settings should contain a name"; - Map groupSettings = settings.getGroups(THREADPOOL_GROUP); + Map groupSettings = getThreadPoolSettingsGroup(settings); int availableProcessors = EsExecutors.boundedNumberOfProcessors(settings); int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5); int halfProcMaxAt10 = Math.min(((availableProcessors + 1) / 2), 10); Map defaultExecutorTypeSettings = new HashMap<>(); - defaultExecutorTypeSettings.put(Names.GENERIC, settingsBuilder().put("type", "cached").put("keep_alive", "30s").build()); - defaultExecutorTypeSettings.put(Names.INDEX, - settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 200).build()); - defaultExecutorTypeSettings.put(Names.BULK, - settingsBuilder().put("type", "fixed").put("size", 
availableProcessors).put("queue_size", 50).build()); - defaultExecutorTypeSettings.put(Names.GET, - settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 1000).build()); - defaultExecutorTypeSettings.put(Names.SEARCH, - settingsBuilder().put("type", "fixed").put("size", ((availableProcessors * 3) / 2) + 1).put("queue_size", 1000).build()); - defaultExecutorTypeSettings.put(Names.SUGGEST, - settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 1000).build()); - defaultExecutorTypeSettings.put(Names.PERCOLATE, - settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 1000).build()); - defaultExecutorTypeSettings .put(Names.MANAGEMENT, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", 5).build()); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GENERIC).keepAlive("30s")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.INDEX).size(availableProcessors).queueSize(200)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.BULK).size(availableProcessors).queueSize(50)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.GET).size(availableProcessors).queueSize(1000)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.SEARCH).size(((availableProcessors * 3) / 2) + 1).queueSize(1000)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.SUGGEST).size(availableProcessors).queueSize(1000)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.PERCOLATE).size(availableProcessors).queueSize(1000)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.MANAGEMENT).size(5).keepAlive("5m")); // no queue as this means clients will need to handle rejections on listener queue even if the operation succeeded // the assumption here is that the listeners should be very lightweight on the listeners side - 
defaultExecutorTypeSettings.put(Names.LISTENER, settingsBuilder().put("type", "fixed").put("size", halfProcMaxAt10).build()); - defaultExecutorTypeSettings.put(Names.FLUSH, - settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build()); - defaultExecutorTypeSettings.put(Names.REFRESH, - settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt10).build()); - defaultExecutorTypeSettings.put(Names.WARMER, - settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build()); - defaultExecutorTypeSettings.put(Names.SNAPSHOT, - settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build()); - defaultExecutorTypeSettings.put(Names.FORCE_MERGE, settingsBuilder().put("type", "fixed").put("size", 1).build()); - defaultExecutorTypeSettings.put(Names.FETCH_SHARD_STARTED, - settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", availableProcessors * 2).build()); - defaultExecutorTypeSettings.put(Names.FETCH_SHARD_STORE, - settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", availableProcessors * 2).build()); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.LISTENER).size(halfProcMaxAt10)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FLUSH).size(halfProcMaxAt5).keepAlive("5m")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.REFRESH).size(halfProcMaxAt10).keepAlive("5m")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.WARMER).size(halfProcMaxAt5).keepAlive("5m")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.SNAPSHOT).size(halfProcMaxAt5).keepAlive("5m")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FORCE_MERGE).size(1)); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STARTED).size(availableProcessors * 
2).keepAlive("5m")); + add(defaultExecutorTypeSettings, new ExecutorSettingsBuilder(Names.FETCH_SHARD_STORE).size(availableProcessors * 2).keepAlive("5m")); + this.defaultExecutorTypeSettings = unmodifiableMap(defaultExecutorTypeSettings); Map executors = new HashMap<>(); @@ -163,8 +237,8 @@ public class ThreadPool extends AbstractComponent { executors.put(entry.getKey(), build(entry.getKey(), entry.getValue(), Settings.EMPTY)); } - executors.put(Names.SAME, new ExecutorHolder(DIRECT_EXECUTOR, new Info(Names.SAME, "same"))); - if (!executors.get(Names.GENERIC).info.getType().equals("cached")) { + executors.put(Names.SAME, new ExecutorHolder(DIRECT_EXECUTOR, new Info(Names.SAME, ThreadPoolType.DIRECT))); + if (!executors.get(Names.GENERIC).info.getThreadPoolType().equals(ThreadPoolType.CACHED)) { throw new IllegalArgumentException("generic thread pool must be of type cached"); } this.executors = unmodifiableMap(executors); @@ -178,6 +252,12 @@ public class ThreadPool extends AbstractComponent { this.estimatedTimeThread.start(); } + private Map getThreadPoolSettingsGroup(Settings settings) { + Map groupSettings = settings.getGroups(THREADPOOL_GROUP); + validate(groupSettings); + return groupSettings; + } + public void setNodeSettingsService(NodeSettingsService nodeSettingsService) { if(settingsListenerIsSet) { throw new IllegalStateException("the node settings listener was set more then once"); @@ -326,24 +406,28 @@ public class ThreadPool extends AbstractComponent { settings = Settings.Builder.EMPTY_SETTINGS; } Info previousInfo = previousExecutorHolder != null ? previousExecutorHolder.info : null; - String type = settings.get("type", previousInfo != null ? previousInfo.getType() : defaultSettings.get("type")); + String type = settings.get("type", previousInfo != null ? 
previousInfo.getThreadPoolType().getType() : defaultSettings.get("type")); + ThreadPoolType threadPoolType = ThreadPoolType.fromType(type); ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(this.settings, name); - if ("same".equals(type)) { + if (ThreadPoolType.DIRECT == threadPoolType) { if (previousExecutorHolder != null) { logger.debug("updating thread_pool [{}], type [{}]", name, type); } else { logger.debug("creating thread_pool [{}], type [{}]", name, type); } - return new ExecutorHolder(DIRECT_EXECUTOR, new Info(name, type)); - } else if ("cached".equals(type)) { + return new ExecutorHolder(DIRECT_EXECUTOR, new Info(name, threadPoolType)); + } else if (ThreadPoolType.CACHED == threadPoolType) { + if (!Names.GENERIC.equals(name)) { + throw new IllegalArgumentException("thread pool type cached is reserved only for the generic thread pool and can not be applied to [" + name + "]"); + } TimeValue defaultKeepAlive = defaultSettings.getAsTime("keep_alive", timeValueMinutes(5)); if (previousExecutorHolder != null) { - if ("cached".equals(previousInfo.getType())) { + if (ThreadPoolType.CACHED == previousInfo.getThreadPoolType()) { TimeValue updatedKeepAlive = settings.getAsTime("keep_alive", previousInfo.getKeepAlive()); if (!previousInfo.getKeepAlive().equals(updatedKeepAlive)) { logger.debug("updating thread_pool [{}], type [{}], keep_alive [{}]", name, type, updatedKeepAlive); ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setKeepAliveTime(updatedKeepAlive.millis(), TimeUnit.MILLISECONDS); - return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, type, -1, -1, updatedKeepAlive, null)); + return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, threadPoolType, -1, -1, updatedKeepAlive, null)); } return previousExecutorHolder; } @@ -358,13 +442,13 @@ public class ThreadPool extends AbstractComponent { logger.debug("creating thread_pool [{}], type [{}], keep_alive [{}]", name, type, keepAlive); } 
Executor executor = EsExecutors.newCached(name, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory); - return new ExecutorHolder(executor, new Info(name, type, -1, -1, keepAlive, null)); - } else if ("fixed".equals(type)) { + return new ExecutorHolder(executor, new Info(name, threadPoolType, -1, -1, keepAlive, null)); + } else if (ThreadPoolType.FIXED == threadPoolType) { int defaultSize = defaultSettings.getAsInt("size", EsExecutors.boundedNumberOfProcessors(settings)); SizeValue defaultQueueSize = getAsSizeOrUnbounded(defaultSettings, "queue", getAsSizeOrUnbounded(defaultSettings, "queue_size", null)); if (previousExecutorHolder != null) { - if ("fixed".equals(previousInfo.getType())) { + if (ThreadPoolType.FIXED == previousInfo.getThreadPoolType()) { SizeValue updatedQueueSize = getAsSizeOrUnbounded(settings, "capacity", getAsSizeOrUnbounded(settings, "queue", getAsSizeOrUnbounded(settings, "queue_size", previousInfo.getQueueSize()))); if (Objects.equals(previousInfo.getQueueSize(), updatedQueueSize)) { int updatedSize = settings.getAsInt("size", previousInfo.getMax()); @@ -378,7 +462,7 @@ public class ThreadPool extends AbstractComponent { ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setCorePoolSize(updatedSize); ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setMaximumPoolSize(updatedSize); } - return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, type, updatedSize, updatedSize, null, updatedQueueSize)); + return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, threadPoolType, updatedSize, updatedSize, null, updatedQueueSize)); } return previousExecutorHolder; } @@ -393,13 +477,13 @@ public class ThreadPool extends AbstractComponent { SizeValue queueSize = getAsSizeOrUnbounded(settings, "capacity", getAsSizeOrUnbounded(settings, "queue", getAsSizeOrUnbounded(settings, "queue_size", defaultQueueSize))); logger.debug("creating thread_pool [{}], type [{}], size [{}], queue_size [{}]", name, 
type, size, queueSize); Executor executor = EsExecutors.newFixed(name, size, queueSize == null ? -1 : (int) queueSize.singles(), threadFactory); - return new ExecutorHolder(executor, new Info(name, type, size, size, null, queueSize)); - } else if ("scaling".equals(type)) { + return new ExecutorHolder(executor, new Info(name, threadPoolType, size, size, null, queueSize)); + } else if (ThreadPoolType.SCALING == threadPoolType) { TimeValue defaultKeepAlive = defaultSettings.getAsTime("keep_alive", timeValueMinutes(5)); int defaultMin = defaultSettings.getAsInt("min", 1); int defaultSize = defaultSettings.getAsInt("size", EsExecutors.boundedNumberOfProcessors(settings)); if (previousExecutorHolder != null) { - if ("scaling".equals(previousInfo.getType())) { + if (ThreadPoolType.SCALING == previousInfo.getThreadPoolType()) { TimeValue updatedKeepAlive = settings.getAsTime("keep_alive", previousInfo.getKeepAlive()); int updatedMin = settings.getAsInt("min", previousInfo.getMin()); int updatedSize = settings.getAsInt("max", settings.getAsInt("size", previousInfo.getMax())); @@ -414,7 +498,7 @@ public class ThreadPool extends AbstractComponent { if (previousInfo.getMax() != updatedSize) { ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setMaximumPoolSize(updatedSize); } - return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, type, updatedMin, updatedSize, updatedKeepAlive, null)); + return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, threadPoolType, updatedMin, updatedSize, updatedKeepAlive, null)); } return previousExecutorHolder; } @@ -437,13 +521,13 @@ public class ThreadPool extends AbstractComponent { logger.debug("creating thread_pool [{}], type [{}], min [{}], size [{}], keep_alive [{}]", name, type, min, size, keepAlive); } Executor executor = EsExecutors.newScaling(name, min, size, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory); - return new ExecutorHolder(executor, new Info(name, type, min, size, 
keepAlive, null)); + return new ExecutorHolder(executor, new Info(name, threadPoolType, min, size, keepAlive, null)); } throw new IllegalArgumentException("No type found [" + type + "], for [" + name + "]"); } public void updateSettings(Settings settings) { - Map groupSettings = settings.getGroups("threadpool"); + Map groupSettings = getThreadPoolSettingsGroup(settings); if (groupSettings.isEmpty()) { return; } @@ -490,6 +574,20 @@ public class ThreadPool extends AbstractComponent { } } + private void validate(Map groupSettings) { + for (String key : groupSettings.keySet()) { + if (!THREAD_POOL_TYPES.containsKey(key)) { + continue; + } + String type = groupSettings.get(key).get("type"); + ThreadPoolType correctThreadPoolType = THREAD_POOL_TYPES.get(key); + // TODO: the type equality check can be removed after #3760/#6732 are addressed + if (type != null && !correctThreadPoolType.getType().equals(type)) { + throw new IllegalArgumentException("setting " + THREADPOOL_GROUP + key + ".type to " + type + " is not permitted; must be " + correctThreadPoolType.getType()); + } + } + } + /** * A thread pool size can also be unbounded and is represented by -1, which is not supported by SizeValue (which only supports positive numbers) */ @@ -643,7 +741,7 @@ public class ThreadPool extends AbstractComponent { public static class Info implements Streamable, ToXContent { private String name; - private String type; + private ThreadPoolType type; private int min; private int max; private TimeValue keepAlive; @@ -653,15 +751,15 @@ public class ThreadPool extends AbstractComponent { } - public Info(String name, String type) { + public Info(String name, ThreadPoolType type) { this(name, type, -1); } - public Info(String name, String type, int size) { + public Info(String name, ThreadPoolType type, int size) { this(name, type, size, size, null, null); } - public Info(String name, String type, int min, int max, @Nullable TimeValue keepAlive, @Nullable SizeValue queueSize) { + public 
Info(String name, ThreadPoolType type, int min, int max, @Nullable TimeValue keepAlive, @Nullable SizeValue queueSize) { this.name = name; this.type = type; this.min = min; @@ -674,7 +772,7 @@ public class ThreadPool extends AbstractComponent { return this.name; } - public String getType() { + public ThreadPoolType getThreadPoolType() { return this.type; } @@ -699,7 +797,7 @@ public class ThreadPool extends AbstractComponent { @Override public void readFrom(StreamInput in) throws IOException { name = in.readString(); - type = in.readString(); + type = ThreadPoolType.fromType(in.readString()); min = in.readInt(); max = in.readInt(); if (in.readBoolean()) { @@ -716,7 +814,7 @@ public class ThreadPool extends AbstractComponent { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); - out.writeString(type); + out.writeString(type.getType()); out.writeInt(min); out.writeInt(max); if (keepAlive == null) { @@ -739,7 +837,7 @@ public class ThreadPool extends AbstractComponent { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(name, XContentBuilder.FieldCaseConversion.NONE); - builder.field(Fields.TYPE, type); + builder.field(Fields.TYPE, type.getType()); if (min != -1) { builder.field(Fields.MIN, min); } @@ -814,4 +912,37 @@ public class ThreadPool extends AbstractComponent { return false; } + public static ThreadPoolTypeSettingsValidator THREAD_POOL_TYPE_SETTINGS_VALIDATOR = new ThreadPoolTypeSettingsValidator(); + private static class ThreadPoolTypeSettingsValidator implements Validator { + @Override + public String validate(String setting, String value, ClusterState clusterState) { + // TODO: the type equality validation can be removed after #3760/#6732 are addressed + Matcher matcher = Pattern.compile("threadpool\\.(.*)\\.type").matcher(setting); + if (!matcher.matches()) { + return null; + } else { + String threadPool = matcher.group(1); + 
ThreadPool.ThreadPoolType defaultThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPool); + ThreadPool.ThreadPoolType threadPoolType; + try { + threadPoolType = ThreadPool.ThreadPoolType.fromType(value); + } catch (IllegalArgumentException e) { + return e.getMessage(); + } + if (defaultThreadPoolType.equals(threadPoolType)) { + return null; + } else { + return String.format( + Locale.ROOT, + "thread pool type for [%s] can only be updated to [%s] but was [%s]", + threadPool, + defaultThreadPoolType.getType(), + threadPoolType.getType() + ); + } + } + + } + } + } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy index 2475c56e814..dbbc4f14d7e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/untrusted.policy @@ -19,13 +19,16 @@ /* * Limited security policy for scripts. - * This is what is needed for invokeDynamic functionality to work. + * This is what is needed for basic functionality to work. 
*/ grant { // groovy IndyInterface bootstrap requires this property for indy logging permission java.util.PropertyPermission "groovy.indy.logging", "read"; + // needed by Rhino engine exception handling + permission java.util.PropertyPermission "rhino.stack.style", "read"; + // needed IndyInterface selectMethod (setCallSiteTarget) permission java.lang.RuntimePermission "getClassLoader"; }; diff --git a/core/src/test/java/org/elasticsearch/ESExceptionTests.java b/core/src/test/java/org/elasticsearch/ESExceptionTests.java index 4d81b3488cb..5aeaf9601ef 100644 --- a/core/src/test/java/org/elasticsearch/ESExceptionTests.java +++ b/core/src/test/java/org/elasticsearch/ESExceptionTests.java @@ -140,7 +140,7 @@ public class ESExceptionTests extends ESTestCase { new SearchShardTarget("node_1", "foo", 1)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", "foo", 2)); - SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1}); + SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", randomBoolean() ? 
failure1.getCause() : failure.getCause(), new ShardSearchFailure[]{failure, failure1}); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); ex.toXContent(builder, PARAMS); @@ -163,6 +163,21 @@ public class ESExceptionTests extends ESTestCase { String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}},{\"shard\":1,\"index\":\"foo1\",\"node\":\"node_1\",\"reason\":{\"type\":\"query_shard_exception\",\"reason\":\"foobar\",\"index\":\"foo1\"}}]}"; assertEquals(expected, builder.string()); } + { + ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), + new SearchShardTarget("node_1", "foo", 1)); + ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), + new SearchShardTarget("node_1", "foo", 2)); + NullPointerException nullPointerException = new NullPointerException(); + SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", nullPointerException, new ShardSearchFailure[]{failure, failure1}); + assertEquals(nullPointerException, ex.getCause()); + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + ex.toXContent(builder, PARAMS); + builder.endObject(); + String expected = "{\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}],\"caused_by\":{\"type\":\"null_pointer_exception\",\"reason\":null}}"; + assertEquals(expected, builder.string()); + } } public void testGetRootCause() { diff --git 
a/core/src/test/java/org/elasticsearch/benchmark/scripts/score/script/NativeNaiveTFIDFScoreScript.java b/core/src/test/java/org/elasticsearch/benchmark/scripts/score/script/NativeNaiveTFIDFScoreScript.java index 9d6a1cd2f07..e96b35df96a 100644 --- a/core/src/test/java/org/elasticsearch/benchmark/scripts/score/script/NativeNaiveTFIDFScoreScript.java +++ b/core/src/test/java/org/elasticsearch/benchmark/scripts/score/script/NativeNaiveTFIDFScoreScript.java @@ -70,7 +70,7 @@ public class NativeNaiveTFIDFScoreScript extends AbstractSearchScript { score += indexFieldTerm.tf() * indexField.docCount() / indexFieldTerm.df(); } } catch (IOException e) { - throw new RuntimeException(); + throw new RuntimeException(e); } } return score; diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java index db10addfe2e..1a32064fe7d 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTests.java @@ -43,7 +43,7 @@ public class CountDownTests extends ESTestCase { try { latch.await(); } catch (InterruptedException e) { - throw new RuntimeException(); + throw new RuntimeException(e); } while (true) { if(frequently()) { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java index 53c743d34d5..b675d29c9da 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java +++ b/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java @@ -214,7 +214,7 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { Thread.sleep(timeout); } catch (InterruptedException e1) { Thread.currentThread().interrupt(); - throw new RuntimeException(); + throw new RuntimeException(e1); } } diff 
--git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 788c3ca3324..ebbe55ce47c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -20,10 +20,12 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; +import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; @@ -78,6 +80,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.*; +import org.elasticsearch.script.Script.ScriptParseException; import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; @@ -359,6 +362,29 @@ public abstract class AbstractQueryTestCase> // we'd like to see the offending field name here assertThat(e.getMessage(), containsString("bogusField")); } + } + + /** + * Test that adding additional object into otherwise correct query string + * should always trigger some kind of Parsing Exception. 
+ */ + public void testUnknownObjectException() throws IOException { + String validQuery = createTestQueryBuilder().toString(); + assertThat(validQuery, containsString("{")); + for (int insertionPosition = 0; insertionPosition < validQuery.length(); insertionPosition++) { + if (validQuery.charAt(insertionPosition) == '{') { + String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " + validQuery.substring(insertionPosition) + "}"; + try { + parseQuery(testQuery); + fail("some parsing exception expected for query: " + testQuery); + } catch (ParsingException | ScriptParseException | ElasticsearchParseException e) { + // different kinds of exception wordings depending on location + // of mutation, so no simple asserts possible here + } catch (JsonParseException e) { + // mutation produced invalid json + } + } + } } /** @@ -512,7 +538,7 @@ public abstract class AbstractQueryTestCase> testQuery.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { QueryBuilder prototype = queryParser(testQuery.getName()).getBuilderPrototype(); - QueryBuilder deserializedQuery = prototype.readFrom(in); + QueryBuilder deserializedQuery = prototype.readFrom(in); assertEquals(deserializedQuery, testQuery); assertEquals(deserializedQuery.hashCode(), testQuery.hashCode()); assertNotSame(deserializedQuery, testQuery); diff --git a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java index cb3e8dcdc3c..422f779620a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/HasChildQueryBuilderTests.java @@ -20,11 +20,15 @@ package org.elasticsearch.index.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import com.fasterxml.jackson.core.JsonParseException; + import 
org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.*; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -35,6 +39,7 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.query.support.QueryInnerHits; +import org.elasticsearch.script.Script.ScriptParseException; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; @@ -44,7 +49,8 @@ import org.elasticsearch.test.TestSearchContext; import java.io.IOException; import java.util.Collections; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; +import static org.hamcrest.Matchers.containsString; + import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -52,6 +58,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase { - private String[] queryTerms; - @Override protected SimpleQueryStringBuilder doCreateTestQueryBuilder() { - int numberOfTerms = randomIntBetween(1, 5); - queryTerms = new String[numberOfTerms]; - StringBuilder queryString = new StringBuilder(); - for (int i = 0; i < numberOfTerms; i++) { - queryTerms[i] = randomAsciiOfLengthBetween(1, 10); - queryString.append(queryTerms[i] + " "); - } - SimpleQueryStringBuilder result = new SimpleQueryStringBuilder(queryString.toString().trim()); + SimpleQueryStringBuilder result = new 
SimpleQueryStringBuilder(randomAsciiOfLengthBetween(1, 10)); if (randomBoolean()) { result.analyzeWildcard(randomBoolean()); } @@ -82,13 +74,9 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase flagSet = new HashSet<>(); - if (numberOfTerms > 1) { - flagSet.add(SimpleQueryStringFlag.WHITESPACE); - } int size = randomIntBetween(0, SimpleQueryStringFlag.values().length); for (int i = 0; i < size; i++) { - SimpleQueryStringFlag randomFlag = randomFrom(SimpleQueryStringFlag.values()); - flagSet.add(randomFlag); + flagSet.add(randomFrom(SimpleQueryStringFlag.values())); } if (flagSet.size() > 0) { result.flags(flagSet.toArray(new SimpleQueryStringFlag[flagSet.size()])); @@ -99,12 +87,13 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase fields = new HashMap<>(); for (int i = 0; i < fieldCount; i++) { if (randomBoolean()) { - fields.put("f" + i + "_" + randomAsciiOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST); + fields.put(randomAsciiOfLengthBetween(1, 10), AbstractQueryBuilder.DEFAULT_BOOST); } else { - fields.put(randomBoolean() ? STRING_FIELD_NAME : "f" + i + "_" + randomAsciiOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20)); + fields.put(randomBoolean() ? 
STRING_FIELD_NAME : randomAsciiOfLengthBetween(1, 10), 2.0f / randomIntBetween(1, 20)); } } result.fields(fields); + return result; } @@ -259,7 +248,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 0 || shardContext.indexVersionCreated().before(Version.V_1_4_0_Beta1)) { Query luceneQuery = queryBuilder.toQuery(shardContext); - assertThat(luceneQuery, instanceOf(BooleanQuery.class)); - TermQuery termQuery = (TermQuery) ((BooleanQuery) luceneQuery).clauses().get(0).getQuery(); + assertThat(luceneQuery, instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) luceneQuery; assertThat(termQuery.getTerm(), equalTo(new Term(MetaData.ALL, query))); } } @@ -288,7 +277,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 1) { assertTrue("Query should have been BooleanQuery but was " + query.getClass().getName(), query instanceof BooleanQuery); BooleanQuery boolQuery = (BooleanQuery) query; @@ -301,42 +290,32 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase expectedFields = new TreeMap(queryBuilder.fields()); - if (expectedFields.size() == 0) { - expectedFields.put(MetaData.ALL, AbstractQueryBuilder.DEFAULT_BOOST); - } - for (int i = 0; i < queryTerms.length; i++) { - BooleanClause booleanClause = boolQuery.clauses().get(i); - Iterator> fieldsIter = expectedFields.entrySet().iterator(); - - if (queryTerms.length == 1 && expectedFields.size() == 1) { - assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) booleanClause.getQuery(); - Entry entry = fieldsIter.next(); - assertThat(termQuery.getTerm().field(), equalTo(entry.getKey())); - assertThat(termQuery.getBoost(), equalTo(entry.getValue())); - assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryTerms[i].toLowerCase(Locale.ROOT))); - } else { - assertThat(booleanClause.getQuery(), instanceOf(BooleanQuery.class)); - for (BooleanClause clause : ((BooleanQuery) 
booleanClause.getQuery()).clauses()) { - TermQuery termQuery = (TermQuery) clause.getQuery(); - Entry entry = fieldsIter.next(); - assertThat(termQuery.getTerm().field(), equalTo(entry.getKey())); - assertThat(termQuery.getBoost(), equalTo(entry.getValue())); - assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryTerms[i].toLowerCase(Locale.ROOT))); - } - } + assertThat(boolQuery.clauses().size(), equalTo(queryBuilder.fields().size())); + Iterator fields = queryBuilder.fields().keySet().iterator(); + for (BooleanClause booleanClause : boolQuery) { + assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) booleanClause.getQuery(); + assertThat(termQuery.getTerm().field(), equalTo(fields.next())); + assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryBuilder.value().toLowerCase(Locale.ROOT))); } if (queryBuilder.minimumShouldMatch() != null) { - int optionalClauses = queryTerms.length; - if (queryBuilder.defaultOperator().equals(Operator.AND) && queryTerms.length > 1) { - optionalClauses = 0; - } - int expectedMinimumShouldMatch = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch()); - assertEquals(expectedMinimumShouldMatch, boolQuery.getMinimumNumberShouldMatch()); + assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); } + } else if (queryBuilder.fields().size() <= 1) { + assertTrue("Query should have been TermQuery but was " + query.getClass().getName(), query instanceof TermQuery); + + TermQuery termQuery = (TermQuery) query; + String field; + if (queryBuilder.fields().size() == 0) { + field = MetaData.ALL; + } else { + field = queryBuilder.fields().keySet().iterator().next(); + } + assertThat(termQuery.getTerm().field(), equalTo(field)); + assertThat(termQuery.getTerm().text().toLowerCase(Locale.ROOT), equalTo(queryBuilder.value().toLowerCase(Locale.ROOT))); + } else { + fail("Encountered lucene query type we do not have 
a validation implementation for in our " + SimpleQueryStringBuilderTests.class.getSimpleName()); } } @@ -362,18 +341,15 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 1) { - expectedMinimumShouldMatch = 0; - } - - assertEquals(expectedMinimumShouldMatch, query.getMinimumNumberShouldMatch()); - for (BooleanClause clause : query.clauses()) { - if (numberOfFields == 1 && numberOfTerms == 1) { - assertTrue(clause.getQuery() instanceof TermQuery); - } else { - assertEquals(numberOfFields, ((BooleanQuery) clause.getQuery()).clauses().size()); - } - } - } } diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java index ce5eca5d11c..d4816f8d334 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java +++ b/core/src/test/java/org/elasticsearch/index/query/TemplateQueryIT.java @@ -307,7 +307,7 @@ public class TemplateQueryIT extends ESIntegTestCase { templateParams)).get(); fail("Expected SearchPhaseExecutionException"); } catch (SearchPhaseExecutionException e) { - assertThat(e.getCause().getMessage(), containsString("Illegal index script format")); + assertThat(e.toString(), containsString("Illegal index script format")); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 3976823aa6b..3307a8429d9 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -42,7 +42,10 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.containsString; public class 
TermsQueryBuilderTests extends AbstractQueryTestCase { private List randomTerms; @@ -195,9 +198,9 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase query 1"); SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get(); assertHitCount(searchResponse, 2l); @@ -119,13 +120,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "3", "4"); - logger.info("--> query 3"); // test case from #13884 - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo") - .field("body").field("body2").field("body3").minimumShouldMatch("-50%")).get(); - assertHitCount(searchResponse, 3l); - assertSearchHits(searchResponse, "1", "3", "4"); - - logger.info("--> query 4"); + logger.info("--> query 3"); searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body").field("body2").minimumShouldMatch("70%")).get(); assertHitCount(searchResponse, 2l); assertSearchHits(searchResponse, "3", "4"); @@ -136,17 +131,17 @@ public class SimpleQueryStringIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "7").setSource("body2", "foo bar", "other", "foo"), client().prepareIndex("test", "type1", "8").setSource("body2", "foo baz bar", "other", "foo")); - logger.info("--> query 5"); + logger.info("--> query 4"); searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")).get(); assertHitCount(searchResponse, 4l); assertSearchHits(searchResponse, "3", "4", "7", "8"); - logger.info("--> query 6"); + logger.info("--> query 5"); searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get(); assertHitCount(searchResponse, 5l); assertSearchHits(searchResponse, "3", "4", "6", "7", "8"); - logger.info("--> query 7"); + logger.info("--> query 
6"); searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body2").field("other").minimumShouldMatch("70%")).get(); assertHitCount(searchResponse, 3l); assertSearchHits(searchResponse, "6", "7", "8"); diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index d52f67dc82c..838c2a6d401 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -46,20 +46,13 @@ import java.lang.management.ThreadMXBean; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.Executor; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.*; import java.util.regex.Pattern; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.Matchers.*; /** */ @@ -67,7 +60,7 @@ import static org.hamcrest.Matchers.sameInstance; public class SimpleThreadPoolIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).put("threadpool.search.type", "cached").build(); + return Settings.settingsBuilder().build(); } public void testThreadNames() throws Exception { @@ -130,26 +123,23 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { 
internalCluster().startNodesAsync(2).get(); ThreadPool threadPool = internalCluster().getDataNodeInstance(ThreadPool.class); // Check that settings are changed - assertThat(((ThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(5L)); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put("threadpool.search.keep_alive", "10m").build()).execute().actionGet(); - assertThat(((ThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L)); + assertThat(((ThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getQueue().remainingCapacity(), equalTo(1000)); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put("threadpool.search.queue_size", 2000).build()).execute().actionGet(); + assertThat(((ThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getQueue().remainingCapacity(), equalTo(2000)); // Make sure that threads continue executing when executor is replaced final CyclicBarrier barrier = new CyclicBarrier(2); Executor oldExecutor = threadPool.executor(Names.SEARCH); - threadPool.executor(Names.SEARCH).execute(new Runnable() { - @Override - public void run() { - try { - barrier.await(); - } catch (InterruptedException ex) { - Thread.currentThread().interrupt(); - } catch (BrokenBarrierException ex) { - // - } - } - }); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put("threadpool.search.type", "fixed").build()).execute().actionGet(); + threadPool.executor(Names.SEARCH).execute(() -> { + try { + barrier.await(); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } catch (BrokenBarrierException ex) { + // + } + }); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put("threadpool.search.queue_size", 1000).build()).execute().actionGet(); assertThat(threadPool.executor(Names.SEARCH), 
not(sameInstance(oldExecutor))); assertThat(((ThreadPoolExecutor) oldExecutor).isShutdown(), equalTo(true)); assertThat(((ThreadPoolExecutor) oldExecutor).isTerminating(), equalTo(true)); @@ -157,24 +147,19 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { barrier.await(10, TimeUnit.SECONDS); // Make sure that new thread executor is functional - threadPool.executor(Names.SEARCH).execute(new Runnable() { - @Override - public void run() { - try { - barrier.await(); - } catch (InterruptedException ex) { - Thread.currentThread().interrupt(); - } catch (BrokenBarrierException ex) { - // + threadPool.executor(Names.SEARCH).execute(() -> { + try { + barrier.await(); + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } catch (BrokenBarrierException ex) { + // + } } - } - }); - client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put("threadpool.search.type", "fixed").build()).execute().actionGet(); + ); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(settingsBuilder().put("threadpool.search.queue_size", 500)).execute().actionGet(); barrier.await(10, TimeUnit.SECONDS); - // This was here: Thread.sleep(200); - // Why? What was it for? 
- // Check that node info is correct NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().all().execute().actionGet(); for (int i = 0; i < 2; i++) { @@ -182,7 +167,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { boolean found = false; for (ThreadPool.Info info : nodeInfo.getThreadPool()) { if (info.getName().equals(Names.SEARCH)) { - assertThat(info.getType(), equalTo("fixed")); + assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); found = true; break; } diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index cb27fd71f9d..3d57c1d5206 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.threadpool; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -30,7 +31,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; +import java.io.IOException; import java.util.Map; import static org.elasticsearch.common.settings.Settings.settingsBuilder; @@ -44,9 +47,16 @@ import static org.hamcrest.Matchers.nullValue; */ public class ThreadPoolSerializationTests extends ESTestCase { BytesStreamOutput output = new BytesStreamOutput(); + private ThreadPool.ThreadPoolType threadPoolType; + + @Before + public void setUp() throws Exception { + super.setUp(); + threadPoolType = randomFrom(ThreadPool.ThreadPoolType.values()); + } public void testThatQueueSizeSerializationWorks() throws Exception { - 
ThreadPool.Info info = new ThreadPool.Info("foo", "search", 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("10k")); + ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("10k")); output.setVersion(Version.CURRENT); info.writeTo(output); @@ -58,7 +68,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { } public void testThatNegativeQueueSizesCanBeSerialized() throws Exception { - ThreadPool.Info info = new ThreadPool.Info("foo", "search", 1, 10, TimeValue.timeValueMillis(3000), null); + ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), null); output.setVersion(Version.CURRENT); info.writeTo(output); @@ -70,7 +80,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { } public void testThatToXContentWritesOutUnboundedCorrectly() throws Exception { - ThreadPool.Info info = new ThreadPool.Info("foo", "search", 1, 10, TimeValue.timeValueMillis(3000), null); + ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), null); XContentBuilder builder = jsonBuilder(); builder.startObject(); info.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -95,7 +105,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { } public void testThatToXContentWritesInteger() throws Exception { - ThreadPool.Info info = new ThreadPool.Info("foo", "search", 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("1k")); + ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType, 1, 10, TimeValue.timeValueMillis(3000), SizeValue.parseSizeValue("1k")); XContentBuilder builder = jsonBuilder(); builder.startObject(); info.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -111,4 +121,16 @@ public class ThreadPoolSerializationTests extends ESTestCase { assertThat(map, hasKey("queue_size")); assertThat(map.get("queue_size").toString(), is("1000")); } + 
+ public void testThatThreadPoolTypeIsSerializedCorrectly() throws IOException { + ThreadPool.Info info = new ThreadPool.Info("foo", threadPoolType); + output.setVersion(Version.CURRENT); + info.writeTo(output); + + StreamInput input = StreamInput.wrap(output.bytes()); + ThreadPool.Info newInfo = new ThreadPool.Info(); + newInfo.readFrom(input); + + assertThat(newInfo.getThreadPoolType(), is(threadPoolType)); + } } diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java new file mode 100644 index 00000000000..3dfca5cb283 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolTypeSettingsValidatorTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.threadpool; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.settings.Validator; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.*; + +import static org.junit.Assert.*; + +public class ThreadPoolTypeSettingsValidatorTests extends ESTestCase { + private Validator validator; + + @Before + public void setUp() throws Exception { + super.setUp(); + validator = ThreadPool.THREAD_POOL_TYPE_SETTINGS_VALIDATOR; + } + + public void testValidThreadPoolTypeSettings() { + for (Map.Entry entry : ThreadPool.THREAD_POOL_TYPES.entrySet()) { + assertNull(validateSetting(validator, entry.getKey(), entry.getValue().getType())); + } + } + + public void testInvalidThreadPoolTypeSettings() { + for (Map.Entry entry : ThreadPool.THREAD_POOL_TYPES.entrySet()) { + Set set = new HashSet<>(); + set.addAll(Arrays.asList(ThreadPool.ThreadPoolType.values())); + set.remove(entry.getValue()); + ThreadPool.ThreadPoolType invalidThreadPoolType = randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()])); + String expectedMessage = String.format( + Locale.ROOT, + "thread pool type for [%s] can only be updated to [%s] but was [%s]", + entry.getKey(), + entry.getValue().getType(), + invalidThreadPoolType.getType()); + String message = validateSetting(validator, entry.getKey(), invalidThreadPoolType.getType()); + assertNotNull(message); + assertEquals(expectedMessage, message); + } + } + + public void testNonThreadPoolTypeSetting() { + String setting = ThreadPool.THREADPOOL_GROUP + randomAsciiOfLength(10) + "foo"; + String value = randomAsciiOfLength(10); + assertNull(validator.validate(setting, value, ClusterState.PROTO)); + } + + private String validateSetting(Validator validator, String threadPoolName, String value) { + return validator.validate(ThreadPool.THREADPOOL_GROUP + threadPoolName + ".type", value, ClusterState.PROTO); + } +} diff --git 
a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index cd252b60d71..95ceea1e490 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -25,22 +25,336 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool.Names; import java.lang.reflect.Field; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static org.elasticsearch.common.settings.Settings.settingsBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.sameInstance; +import static org.hamcrest.Matchers.*; /** */ public class UpdateThreadPoolSettingsTests extends ESTestCase { + public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedException { + String threadPoolName = randomThreadPoolName(); + ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(settingsBuilder() + .put("name", "testCorrectThreadPoolTypePermittedInSettings") + .put("threadpool." 
+ threadPoolName + ".type", correctThreadPoolType.getType()) + .build()); + ThreadPool.Info info = info(threadPool, threadPoolName); + if (ThreadPool.Names.SAME.equals(threadPoolName)) { + assertNull(info); // we don't report on the "same" threadpool + } else { + // otherwise check we have the expected type + assertEquals(info.getThreadPoolType(), correctThreadPoolType); + } + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testThreadPoolCanNotOverrideThreadPoolType() throws InterruptedException { + String threadPoolName = randomThreadPoolName(); + ThreadPool.ThreadPoolType incorrectThreadPoolType = randomIncorrectThreadPoolType(threadPoolName); + ThreadPool.ThreadPoolType correctThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool( + settingsBuilder() + .put("name", "testThreadPoolCanNotOverrideThreadPoolType") + .put("threadpool." + threadPoolName + ".type", incorrectThreadPoolType.getType()) + .build()); + terminate(threadPool); + fail("expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat( + e.getMessage(), + is("setting threadpool." + threadPoolName + ".type to " + incorrectThreadPoolType.getType() + " is not permitted; must be " + correctThreadPoolType.getType())); + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testUpdateSettingsCanNotChangeThreadPoolType() throws InterruptedException { + String threadPoolName = randomThreadPoolName(); + ThreadPool.ThreadPoolType invalidThreadPoolType = randomIncorrectThreadPoolType(threadPoolName); + ThreadPool.ThreadPoolType validThreadPoolType = ThreadPool.THREAD_POOL_TYPES.get(threadPoolName); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(settingsBuilder().put("name", "testUpdateSettingsCanNotChangeThreadPoolType").build()); + + + threadPool.updateSettings( + settingsBuilder() + .put("threadpool." 
+ threadPoolName + ".type", invalidThreadPoolType.getType()) + .build() + ); + fail("expected IllegalArgumentException"); + } catch (IllegalArgumentException e) { + assertThat( + e.getMessage(), + is("setting threadpool." + threadPoolName + ".type to " + invalidThreadPoolType.getType() + " is not permitted; must be " + validThreadPoolType.getType())); + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testCachedExecutorType() throws InterruptedException { + String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.CACHED); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool( + Settings.settingsBuilder() + .put("name", "testCachedExecutorType").build()); + + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + + threadPool.updateSettings(settingsBuilder() + .put("threadpool." + threadPoolName + ".keep_alive", "10m") + .build()); + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(0)); + // Make sure keep alive value changed + assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(10L)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L)); + + // Make sure keep alive value reused + assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(10L)); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + + // Change keep alive + Executor oldExecutor = threadPool.executor(threadPoolName); + threadPool.updateSettings(settingsBuilder().put("threadpool." 
+ threadPoolName + ".keep_alive", "1m").build()); + // Make sure keep alive value changed + assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); + // Make sure executor didn't change + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); + assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); + + // Set the same keep alive + threadPool.updateSettings(settingsBuilder().put("threadpool." + threadPoolName + ".keep_alive", "1m").build()); + // Make sure keep alive value didn't change + assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(1L)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); + // Make sure executor didn't change + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.CACHED); + assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testFixedExecutorType() throws InterruptedException { + String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.FIXED); + ThreadPool threadPool = null; + + try { + threadPool = new ThreadPool(settingsBuilder() + .put("name", "testCachedExecutorType").build()); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + + threadPool.updateSettings(settingsBuilder() + .put("threadpool." 
+ threadPoolName + ".size", "15") + .build()); + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(15)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(15)); + assertThat(info(threadPool, threadPoolName).getMin(), equalTo(15)); + assertThat(info(threadPool, threadPoolName).getMax(), equalTo(15)); + // keep alive does not apply to fixed thread pools + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(0L)); + + // Put old type back + threadPool.updateSettings(Settings.EMPTY); + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); + // Make sure keep alive value is not used + assertThat(info(threadPool, threadPoolName).getKeepAlive(), nullValue()); + // Make sure keep pool size value were reused + assertThat(info(threadPool, threadPoolName).getMin(), equalTo(15)); + assertThat(info(threadPool, threadPoolName).getMax(), equalTo(15)); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(15)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(15)); + + // Change size + Executor oldExecutor = threadPool.executor(threadPoolName); + threadPool.updateSettings(settingsBuilder().put("threadpool." 
+ threadPoolName + ".size", "10").build()); + // Make sure size values changed + assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); + assertThat(info(threadPool, threadPoolName).getMin(), equalTo(10)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(10)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(10)); + // Make sure executor didn't change + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); + assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); + + // Change queue capacity + threadPool.updateSettings(settingsBuilder() + .put("threadpool." + threadPoolName + ".queue", "500") + .build()); + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testScalingExecutorType() throws InterruptedException { + String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.SCALING); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(settingsBuilder() + .put("threadpool." + threadPoolName + ".size", 10) + .put("name", "testCachedExecutorType").build()); + assertThat(info(threadPool, threadPoolName).getMin(), equalTo(1)); + assertThat(info(threadPool, threadPoolName).getMax(), equalTo(10)); + assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(5L)); + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + + // Change settings that doesn't require pool replacement + Executor oldExecutor = threadPool.executor(threadPoolName); + threadPool.updateSettings(settingsBuilder() + .put("threadpool." + threadPoolName + ".keep_alive", "10m") + .put("threadpool." + threadPoolName + ".min", "2") + .put("threadpool." 
+ threadPoolName + ".size", "15") + .build()); + assertEquals(info(threadPool, threadPoolName).getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING); + assertThat(threadPool.executor(threadPoolName), instanceOf(EsThreadPoolExecutor.class)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getCorePoolSize(), equalTo(2)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getMaximumPoolSize(), equalTo(15)); + assertThat(info(threadPool, threadPoolName).getMin(), equalTo(2)); + assertThat(info(threadPool, threadPoolName).getMax(), equalTo(15)); + // Make sure keep alive value changed + assertThat(info(threadPool, threadPoolName).getKeepAlive().minutes(), equalTo(10L)); + assertThat(((EsThreadPoolExecutor) threadPool.executor(threadPoolName)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L)); + assertThat(threadPool.executor(threadPoolName), sameInstance(oldExecutor)); + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testShutdownNowInterrupts() throws Exception { + String threadPoolName = randomThreadPool(ThreadPool.ThreadPoolType.FIXED); + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(Settings.settingsBuilder() + .put("threadpool." + threadPoolName + ".queue_size", 1000) + .put("name", "testCachedExecutorType").build()); + assertEquals(info(threadPool, threadPoolName).getQueueSize().getSingles(), 1000L); + + final CountDownLatch latch = new CountDownLatch(1); + ThreadPoolExecutor oldExecutor = (ThreadPoolExecutor) threadPool.executor(threadPoolName); + threadPool.executor(threadPoolName).execute(() -> { + try { + new CountDownLatch(1).await(); + } catch (InterruptedException ex) { + latch.countDown(); + Thread.currentThread().interrupt(); + } + } + ); + threadPool.updateSettings(settingsBuilder().put("threadpool." 
+ threadPoolName + ".queue_size", 2000).build()); + assertThat(threadPool.executor(threadPoolName), not(sameInstance(oldExecutor))); + assertThat(oldExecutor.isShutdown(), equalTo(true)); + assertThat(oldExecutor.isTerminating(), equalTo(true)); + assertThat(oldExecutor.isTerminated(), equalTo(false)); + threadPool.shutdownNow(); // should interrupt the thread + latch.await(3, TimeUnit.SECONDS); // If this throws then ThreadPool#shutdownNow didn't interrupt + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + public void testCustomThreadPool() throws Exception { + ThreadPool threadPool = null; + try { + threadPool = new ThreadPool(Settings.settingsBuilder() + .put("threadpool.my_pool1.type", "scaling") + .put("threadpool.my_pool2.type", "fixed") + .put("threadpool.my_pool2.size", "1") + .put("threadpool.my_pool2.queue_size", "1") + .put("name", "testCustomThreadPool").build()); + ThreadPoolInfo groups = threadPool.info(); + boolean foundPool1 = false; + boolean foundPool2 = false; + outer: + for (ThreadPool.Info info : groups) { + if ("my_pool1".equals(info.getName())) { + foundPool1 = true; + assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING); + } else if ("my_pool2".equals(info.getName())) { + foundPool2 = true; + assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); + assertThat(info.getMin(), equalTo(1)); + assertThat(info.getMax(), equalTo(1)); + assertThat(info.getQueueSize().singles(), equalTo(1l)); + } else { + for (Field field : Names.class.getFields()) { + if (info.getName().equalsIgnoreCase(field.getName())) { + // This is ok it is a default thread pool + continue outer; + } + } + fail("Unexpected pool name: " + info.getName()); + } + } + assertThat(foundPool1, is(true)); + assertThat(foundPool2, is(true)); + + // Updating my_pool2 + Settings settings = Settings.builder() + .put("threadpool.my_pool2.size", "10") + .build(); + threadPool.updateSettings(settings); + + groups = threadPool.info(); + 
foundPool1 = false; + foundPool2 = false; + outer: + for (ThreadPool.Info info : groups) { + if ("my_pool1".equals(info.getName())) { + foundPool1 = true; + assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.SCALING); + } else if ("my_pool2".equals(info.getName())) { + foundPool2 = true; + assertThat(info.getMax(), equalTo(10)); + assertThat(info.getMin(), equalTo(10)); + assertThat(info.getQueueSize().singles(), equalTo(1l)); + assertEquals(info.getThreadPoolType(), ThreadPool.ThreadPoolType.FIXED); + } else { + for (Field field : Names.class.getFields()) { + if (info.getName().equalsIgnoreCase(field.getName())) { + // This is ok it is a default thread pool + continue outer; + } + } + fail("Unexpected pool name: " + info.getName()); + } + } + assertThat(foundPool1, is(true)); + assertThat(foundPool2, is(true)); + } finally { + terminateThreadPoolIfNeeded(threadPool); + } + } + + private void terminateThreadPoolIfNeeded(ThreadPool threadPool) throws InterruptedException { + if (threadPool != null) { + terminate(threadPool); + } + } + private ThreadPool.Info info(ThreadPool threadPool, String name) { for (ThreadPool.Info info : threadPool.info()) { if (info.getName().equals(name)) { @@ -50,247 +364,20 @@ public class UpdateThreadPoolSettingsTests extends ESTestCase { return null; } - public void testCachedExecutorType() throws InterruptedException { - ThreadPool threadPool = new ThreadPool( - Settings.settingsBuilder() - .put("threadpool.search.type", "cached") - .put("name","testCachedExecutorType").build()); - - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("cached")); - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(5L)); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - - // Replace with different type - threadPool.updateSettings(settingsBuilder().put("threadpool.search.type", "same").build()); - assertThat(info(threadPool, Names.SEARCH).getType(), 
equalTo("same")); - assertThat(threadPool.executor(Names.SEARCH), is(ThreadPool.DIRECT_EXECUTOR)); - - // Replace with different type again - threadPool.updateSettings(settingsBuilder() - .put("threadpool.search.type", "scaling") - .put("threadpool.search.keep_alive", "10m") - .build()); - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("scaling")); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getCorePoolSize(), equalTo(1)); - // Make sure keep alive value changed - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(10L)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L)); - - // Put old type back - threadPool.updateSettings(settingsBuilder().put("threadpool.search.type", "cached").build()); - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("cached")); - // Make sure keep alive value reused - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(10L)); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - - // Change keep alive - Executor oldExecutor = threadPool.executor(Names.SEARCH); - threadPool.updateSettings(settingsBuilder().put("threadpool.search.keep_alive", "1m").build()); - // Make sure keep alive value changed - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(1L)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); - // Make sure executor didn't change - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("cached")); - assertThat(threadPool.executor(Names.SEARCH), sameInstance(oldExecutor)); - - // Set the same keep alive - threadPool.updateSettings(settingsBuilder().put("threadpool.search.keep_alive", "1m").build()); - // Make sure keep alive value 
didn't change - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(1L)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(1L)); - // Make sure executor didn't change - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("cached")); - assertThat(threadPool.executor(Names.SEARCH), sameInstance(oldExecutor)); - terminate(threadPool); + private String randomThreadPoolName() { + Set threadPoolNames = ThreadPool.THREAD_POOL_TYPES.keySet(); + return randomFrom(threadPoolNames.toArray(new String[threadPoolNames.size()])); } - public void testFixedExecutorType() throws InterruptedException { - ThreadPool threadPool = new ThreadPool(settingsBuilder() - .put("threadpool.search.type", "fixed") - .put("name","testCachedExecutorType").build()); - - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - - // Replace with different type - threadPool.updateSettings(settingsBuilder() - .put("threadpool.search.type", "scaling") - .put("threadpool.search.keep_alive", "10m") - .put("threadpool.search.min", "2") - .put("threadpool.search.size", "15") - .build()); - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("scaling")); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getCorePoolSize(), equalTo(2)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getMaximumPoolSize(), equalTo(15)); - assertThat(info(threadPool, Names.SEARCH).getMin(), equalTo(2)); - assertThat(info(threadPool, Names.SEARCH).getMax(), equalTo(15)); - // Make sure keep alive value changed - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(10L)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L)); - - // Put old type back - 
threadPool.updateSettings(settingsBuilder() - .put("threadpool.search.type", "fixed") - .build()); - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("fixed")); - // Make sure keep alive value is not used - assertThat(info(threadPool, Names.SEARCH).getKeepAlive(), nullValue()); - // Make sure keep pool size value were reused - assertThat(info(threadPool, Names.SEARCH).getMin(), equalTo(15)); - assertThat(info(threadPool, Names.SEARCH).getMax(), equalTo(15)); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getCorePoolSize(), equalTo(15)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getMaximumPoolSize(), equalTo(15)); - - // Change size - Executor oldExecutor = threadPool.executor(Names.SEARCH); - threadPool.updateSettings(settingsBuilder().put("threadpool.search.size", "10").build()); - // Make sure size values changed - assertThat(info(threadPool, Names.SEARCH).getMax(), equalTo(10)); - assertThat(info(threadPool, Names.SEARCH).getMin(), equalTo(10)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getMaximumPoolSize(), equalTo(10)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getCorePoolSize(), equalTo(10)); - // Make sure executor didn't change - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("fixed")); - assertThat(threadPool.executor(Names.SEARCH), sameInstance(oldExecutor)); - - // Change queue capacity - threadPool.updateSettings(settingsBuilder() - .put("threadpool.search.queue", "500") - .build()); - - terminate(threadPool); + private ThreadPool.ThreadPoolType randomIncorrectThreadPoolType(String threadPoolName) { + Set set = new HashSet<>(); + set.addAll(Arrays.asList(ThreadPool.ThreadPoolType.values())); + set.remove(ThreadPool.THREAD_POOL_TYPES.get(threadPoolName)); + ThreadPool.ThreadPoolType invalidThreadPoolType = 
randomFrom(set.toArray(new ThreadPool.ThreadPoolType[set.size()])); + return invalidThreadPoolType; } - public void testScalingExecutorType() throws InterruptedException { - ThreadPool threadPool = new ThreadPool(settingsBuilder() - .put("threadpool.search.type", "scaling") - .put("threadpool.search.size", 10) - .put("name","testCachedExecutorType").build()); - - assertThat(info(threadPool, Names.SEARCH).getMin(), equalTo(1)); - assertThat(info(threadPool, Names.SEARCH).getMax(), equalTo(10)); - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(5L)); - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("scaling")); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - - // Change settings that doesn't require pool replacement - Executor oldExecutor = threadPool.executor(Names.SEARCH); - threadPool.updateSettings(settingsBuilder() - .put("threadpool.search.type", "scaling") - .put("threadpool.search.keep_alive", "10m") - .put("threadpool.search.min", "2") - .put("threadpool.search.size", "15") - .build()); - assertThat(info(threadPool, Names.SEARCH).getType(), equalTo("scaling")); - assertThat(threadPool.executor(Names.SEARCH), instanceOf(EsThreadPoolExecutor.class)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getCorePoolSize(), equalTo(2)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getMaximumPoolSize(), equalTo(15)); - assertThat(info(threadPool, Names.SEARCH).getMin(), equalTo(2)); - assertThat(info(threadPool, Names.SEARCH).getMax(), equalTo(15)); - // Make sure keep alive value changed - assertThat(info(threadPool, Names.SEARCH).getKeepAlive().minutes(), equalTo(10L)); - assertThat(((EsThreadPoolExecutor) threadPool.executor(Names.SEARCH)).getKeepAliveTime(TimeUnit.MINUTES), equalTo(10L)); - assertThat(threadPool.executor(Names.SEARCH), sameInstance(oldExecutor)); - - terminate(threadPool); + private String 
randomThreadPool(ThreadPool.ThreadPoolType type) { + return randomFrom(ThreadPool.THREAD_POOL_TYPES.entrySet().stream().filter(t -> t.getValue().equals(type)).map(t -> t.getKey()).collect(Collectors.toList())); } - - public void testShutdownNowInterrupts() throws Exception { - ThreadPool threadPool = new ThreadPool(Settings.settingsBuilder() - .put("threadpool.search.type", "cached") - .put("name","testCachedExecutorType").build()); - - final CountDownLatch latch = new CountDownLatch(1); - ThreadPoolExecutor oldExecutor = (ThreadPoolExecutor) threadPool.executor(Names.SEARCH); - threadPool.executor(Names.SEARCH).execute(new Runnable() { - @Override - public void run() { - try { - new CountDownLatch(1).await(); - } catch (InterruptedException ex) { - latch.countDown(); - Thread.currentThread().interrupt(); - } - } - }); - threadPool.updateSettings(settingsBuilder().put("threadpool.search.type", "fixed").build()); - assertThat(threadPool.executor(Names.SEARCH), not(sameInstance(oldExecutor))); - assertThat(oldExecutor.isShutdown(), equalTo(true)); - assertThat(oldExecutor.isTerminating(), equalTo(true)); - assertThat(oldExecutor.isTerminated(), equalTo(false)); - threadPool.shutdownNow(); // should interrupt the thread - latch.await(3, TimeUnit.SECONDS); // If this throws then shotdownNow didn't interrupt - terminate(threadPool); - } - - public void testCustomThreadPool() throws Exception { - ThreadPool threadPool = new ThreadPool(Settings.settingsBuilder() - .put("threadpool.my_pool1.type", "cached") - .put("threadpool.my_pool2.type", "fixed") - .put("threadpool.my_pool2.size", "1") - .put("threadpool.my_pool2.queue_size", "1") - .put("name", "testCustomThreadPool").build()); - - ThreadPoolInfo groups = threadPool.info(); - boolean foundPool1 = false; - boolean foundPool2 = false; - outer: for (ThreadPool.Info info : groups) { - if ("my_pool1".equals(info.getName())) { - foundPool1 = true; - assertThat(info.getType(), equalTo("cached")); - } else if 
("my_pool2".equals(info.getName())) { - foundPool2 = true; - assertThat(info.getType(), equalTo("fixed")); - assertThat(info.getMin(), equalTo(1)); - assertThat(info.getMax(), equalTo(1)); - assertThat(info.getQueueSize().singles(), equalTo(1l)); - } else { - for (Field field : Names.class.getFields()) { - if (info.getName().equalsIgnoreCase(field.getName())) { - // This is ok it is a default thread pool - continue outer; - } - } - fail("Unexpected pool name: " + info.getName()); - } - } - assertThat(foundPool1, is(true)); - assertThat(foundPool2, is(true)); - - // Updating my_pool2 - Settings settings = Settings.builder() - .put("threadpool.my_pool2.size", "10") - .build(); - threadPool.updateSettings(settings); - - groups = threadPool.info(); - foundPool1 = false; - foundPool2 = false; - outer: for (ThreadPool.Info info : groups) { - if ("my_pool1".equals(info.getName())) { - foundPool1 = true; - assertThat(info.getType(), equalTo("cached")); - } else if ("my_pool2".equals(info.getName())) { - foundPool2 = true; - assertThat(info.getMax(), equalTo(10)); - assertThat(info.getMin(), equalTo(10)); - assertThat(info.getQueueSize().singles(), equalTo(1l)); - assertThat(info.getType(), equalTo("fixed")); - } else { - for (Field field : Names.class.getFields()) { - if (info.getName().equalsIgnoreCase(field.getName())) { - // This is ok it is a default thread pool - continue outer; - } - } - fail("Unexpected pool name: " + info.getName()); - } - } - assertThat(foundPool1, is(true)); - assertThat(foundPool2, is(true)); - terminate(threadPool); - } - } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java b/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java index cbac0aded0e..9581dfff42f 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/KeyedLockTests.java @@ -112,7 +112,7 @@ public class KeyedLockTests extends ESTestCase { 
try { startLatch.await(); } catch (InterruptedException e) { - throw new RuntimeException(); + throw new RuntimeException(e); } int numRuns = scaledRandomIntBetween(5000, 50000); for (int i = 0; i < numRuns; i++) { diff --git a/docs/plugins/authors.asciidoc b/docs/plugins/authors.asciidoc index c0f310683f3..75b7776ec09 100644 --- a/docs/plugins/authors.asciidoc +++ b/docs/plugins/authors.asciidoc @@ -112,7 +112,7 @@ directory in the root of the plugin should be served. === Testing your plugin When testing a Java plugin, it will only be auto-loaded if it is in the -`plugins/` directory. Use `bin/plugin install file://path/to/your/plugin` +`plugins/` directory. Use `bin/plugin install file:///path/to/your/plugin` to install your plugin for testing. You may also load your plugin within the test framework for integration tests. diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index 5ac208576df..a2b80495003 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -165,6 +165,11 @@ The following are a list of settings (prefixed with `discovery.ec2`) that can fu Defaults to `3s`. If no unit like `ms`, `s` or `m` is specified, milliseconds are used. +`node_cache_time`:: + + How long the list of hosts is cached to prevent further requests to the AWS API. + Defaults to `10s`. + [IMPORTANT] .Binding the network host @@ -195,7 +200,6 @@ as valid network host settings: |`_ec2_` |equivalent to _ec2:privateIpv4_. 
|================================================================== - [[discovery-ec2-permissions]] ===== Recommended EC2 Permissions diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 52ff574cfc7..e334fcc718f 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -101,7 +101,7 @@ For instance, to install a plugin from your local file system, you could run: [source,shell] ----------------------------------- -sudo bin/plugin install file:/path/to/plugin.zip +sudo bin/plugin install file:///path/to/plugin.zip ----------------------------------- [[listing-removing]] diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index e0bdff8ddee..f190b7fdce6 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -117,6 +117,10 @@ Removed support for multiple highlighter names, the only supported ones are: `pl Removed support for the deprecated top level `filter` in the search api, replaced by `post_filter`. +==== `query_binary` and `filter_binary` removed + +Removed support for the undocumented `query_binary` and `filter_binary` sections of a search request. + === Parent/Child changes The `children` aggregation, parent child inner hits and `has_child` and `has_parent` queries will not work on indices @@ -389,4 +393,12 @@ request cache and the field data cache. This setting would arbitrarily pick the first interface not marked as loopback. Instead, specify by address scope (e.g. `_local_,_site_` for all loopback and private network addresses) or by explicit interface names, -hostnames, or addresses. +hostnames, or addresses. + +=== Forbid changing of thread pool types + +Previously, <> could be dynamically adjusted. 
The thread pool type effectively +controls the backing queue for the thread pool and modifying this is an expert setting with minimal practical benefits +and high risk of being misused. The ability to change the thread pool type for any thread pool has been removed; do note +that it is still possible to adjust relevant thread pool parameters for each of the thread pools (e.g., depending on +the thread pool type, `keep_alive`, `queue_size`, etc.). diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index 591277889bc..bfd5474183c 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -9,87 +9,92 @@ of discarded. There are several thread pools, but the important ones include: +`generic`:: + For generic operations (e.g., background node discovery). + Thread pool type is `cached`. + `index`:: - For index/delete operations. Defaults to `fixed` + For index/delete operations. Thread pool type is `fixed` with a size of `# of available processors`, queue_size of `200`. `search`:: - For count/search operations. Defaults to `fixed` + For count/search operations. Thread pool type is `fixed` with a size of `int((# of available_processors * 3) / 2) + 1`, queue_size of `1000`. `suggest`:: - For suggest operations. Defaults to `fixed` + For suggest operations. Thread pool type is `fixed` with a size of `# of available processors`, queue_size of `1000`. `get`:: - For get operations. Defaults to `fixed` + For get operations. Thread pool type is `fixed` with a size of `# of available processors`, queue_size of `1000`. `bulk`:: - For bulk operations. Defaults to `fixed` + For bulk operations. Thread pool type is `fixed` with a size of `# of available processors`, queue_size of `50`. `percolate`:: - For percolate operations. Defaults to `fixed` + For percolate operations. Thread pool type is `fixed` with a size of `# of available processors`, queue_size of `1000`. 
`snapshot`:: - For snapshot/restore operations. Defaults to `scaling` with a - keep-alive of `5m` and a size of `min(5, (# of available processors)/2)`, max at 5. + For snapshot/restore operations. Thread pool type is `scaling` with a + keep-alive of `5m` and a size of `min(5, (# of available processors)/2)`. `warmer`:: - For segment warm-up operations. Defaults to `scaling` with a - keep-alive of `5m` and a size of `min(5, (# of available processors)/2)`, max at 5. + For segment warm-up operations. Thread pool type is `scaling` with a + keep-alive of `5m` and a size of `min(5, (# of available processors)/2)`. `refresh`:: - For refresh operations. Defaults to `scaling` with a - keep-alive of `5m` and a size of `min(10, (# of available processors)/2)`, max at 10. + For refresh operations. Thread pool type is `scaling` with a + keep-alive of `5m` and a size of `min(10, (# of available processors)/2)`. `listener`:: Mainly for java client executing of action when listener threaded is set to true. - Default size of `(# of available processors)/2`, max at 10. + Thread pool type is `scaling` with a default size of `min(10, (# of available processors)/2)`. -Changing a specific thread pool can be done by setting its type and -specific type parameters, for example, changing the `index` thread pool -to have more threads: +Changing a specific thread pool can be done by setting its type-specific parameters; for example, changing the `index` +thread pool to have more threads: [source,js] -------------------------------------------------- threadpool: index: - type: fixed size: 30 -------------------------------------------------- -NOTE: you can update threadpool settings live using - <>. - +NOTE: you can update thread pool settings dynamically using <>. 
[float] [[types]] === Thread pool types -The following are the types of thread pools that can be used and their -respective parameters: +The following are the types of thread pools and their respective parameters: [float] -==== `cache` +==== `cached` -The `cache` thread pool is an unbounded thread pool that will spawn a -thread if there are pending requests. Here is an example of how to set -it: +The `cached` thread pool is an unbounded thread pool that will spawn a +thread if there are pending requests. This thread pool is used to +prevent requests submitted to this pool from blocking or being +rejected. Unused threads in this thread pool will be terminated after +a keep alive expires (defaults to five minutes). The `cached` thread +pool is reserved for the <> thread pool. + +The `keep_alive` parameter determines how long a thread should be kept +around in the thread pool without doing any work. [source,js] -------------------------------------------------- threadpool: - index: - type: cached + generic: + keep_alive: 2m -------------------------------------------------- [float] @@ -111,7 +116,6 @@ full, it will abort the request. -------------------------------------------------- threadpool: index: - type: fixed size: 30 queue_size: 1000 -------------------------------------------------- @@ -130,7 +134,6 @@ around in the thread pool without it doing any work. 
-------------------------------------------------- threadpool: warmer: - type: scaling size: 8 keep_alive: 2m -------------------------------------------------- diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java index ab2b54633f4..a427b4af4ab 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2Service.java @@ -49,6 +49,7 @@ public interface AwsEc2Service extends LifecycleComponent { public static final String GROUPS = "discovery.ec2.groups"; public static final String TAG_PREFIX = "discovery.ec2.tag."; public static final String AVAILABILITY_ZONES = "discovery.ec2.availability_zones"; + public static final String NODE_CACHE_TIME = "discovery.ec2.node_cache_time"; } AmazonEC2 client(); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 26e001c2666..76c3262db3f 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -19,10 +19,13 @@ package org.elasticsearch.cloud.aws; +import com.amazonaws.AmazonClientException; +import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.*; import com.amazonaws.internal.StaticCredentialsProvider; +import com.amazonaws.retry.RetryPolicy; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import org.elasticsearch.ElasticsearchException; @@ -36,6 +39,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import 
java.util.Locale; +import java.util.Random; /** * @@ -103,6 +107,24 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent } } + // Increase the number of retries in case of 5xx API responses + final Random rand = new Random(); + RetryPolicy retryPolicy = new RetryPolicy( + RetryPolicy.RetryCondition.NO_RETRY_CONDITION, + new RetryPolicy.BackoffStrategy() { + @Override + public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest, + AmazonClientException exception, + int retriesAttempted) { + // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) + logger.warn("EC2 API request failed, retry again. Reason was:", exception); + return 1000L * (long) (10d * Math.pow(2, ((double) retriesAttempted) / 2.0d) * (1.0d + rand.nextDouble())); + } + }, + 10, + false); + clientConfiguration.setRetryPolicy(retryPolicy); + AWSCredentialsProvider credentials; if (account == null && key == null) { @@ -134,6 +156,8 @@ public class AwsEc2ServiceImpl extends AbstractLifecycleComponent endpoint = "ec2.us-west-2.amazonaws.com"; } else if (region.equals("ap-southeast") || region.equals("ap-southeast-1")) { endpoint = "ec2.ap-southeast-1.amazonaws.com"; + } else if (region.equals("us-gov-west") || region.equals("us-gov-west-1")) { + endpoint = "ec2.us-gov-west-1.amazonaws.com"; } else if (region.equals("ap-southeast-2")) { endpoint = "ec2.ap-southeast-2.amazonaws.com"; } else if (region.equals("ap-northeast") || region.equals("ap-northeast-1")) { diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 94c65047847..f7e70281a3d 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -31,6 +31,8 @@ import 
org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.discovery.zen.ping.unicast.UnicastHostsProvider; import org.elasticsearch.transport.TransportService; @@ -64,6 +66,8 @@ public class AwsEc2UnicastHostsProvider extends AbstractComponent implements Uni private final HostType hostType; + private final DiscoNodesCache discoNodes; + @Inject public AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service, Version version) { super(settings); @@ -74,6 +78,9 @@ public class AwsEc2UnicastHostsProvider extends AbstractComponent implements Uni this.hostType = HostType.valueOf(settings.get(DISCOVERY_EC2.HOST_TYPE, "private_ip") .toUpperCase(Locale.ROOT)); + this.discoNodes = new DiscoNodesCache(this.settings.getAsTime(DISCOVERY_EC2.NODE_CACHE_TIME, + TimeValue.timeValueMillis(10_000L))); + this.bindAnyGroup = settings.getAsBoolean(DISCOVERY_EC2.ANY_GROUP, true); this.groups = new HashSet<>(); groups.addAll(Arrays.asList(settings.getAsArray(DISCOVERY_EC2.GROUPS))); @@ -94,6 +101,11 @@ public class AwsEc2UnicastHostsProvider extends AbstractComponent implements Uni @Override public List buildDynamicNodes() { + return discoNodes.getOrRefresh(); + } + + protected List fetchDynamicNodes() { + List discoNodes = new ArrayList<>(); DescribeInstancesResult descInstances; @@ -199,4 +211,25 @@ public class AwsEc2UnicastHostsProvider extends AbstractComponent implements Uni return describeInstancesRequest; } + + private final class DiscoNodesCache extends SingleObjectCache> { + + private boolean empty = true; + + protected DiscoNodesCache(TimeValue refreshInterval) { + super(refreshInterval, new ArrayList<>()); + } + + @Override + protected boolean 
needsRefresh() { + return (empty || super.needsRefresh()); + } + + @Override + protected List refresh() { + List nodes = fetchDynamicNodes(); + empty = nodes.isEmpty(); + return nodes; + } + } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index ca9493bc4c9..6f88be2be5a 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.local.LocalTransport; import org.junit.AfterClass; import org.junit.Before; @@ -231,4 +232,47 @@ public class Ec2DiscoveryTests extends ESTestCase { assertThat(discoveryNodes, hasSize(prodInstances)); } + abstract class DummyEc2HostProvider extends AwsEc2UnicastHostsProvider { + public int fetchCount = 0; + public DummyEc2HostProvider(Settings settings, TransportService transportService, AwsEc2Service service, Version version) { + super(settings, transportService, service, version); + } + } + + public void testGetNodeListEmptyCache() throws Exception { + AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); + DummyEc2HostProvider provider = new DummyEc2HostProvider(Settings.EMPTY, transportService, awsEc2Service, Version.CURRENT) { + @Override + protected List fetchDynamicNodes() { + fetchCount++; + return new ArrayList<>(); + } + }; + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); + } + assertThat(provider.fetchCount, is(3)); + } + + public void testGetNodeListCached() throws Exception { + 
Settings.Builder builder = Settings.settingsBuilder() + .put(DISCOVERY_EC2.NODE_CACHE_TIME, "500ms"); + AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); + DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, awsEc2Service, Version.CURRENT) { + @Override + protected List fetchDynamicNodes() { + fetchCount++; + return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + } + }; + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); + } + assertThat(provider.fetchCount, is(1)); + Thread.sleep(1_000L); // wait for cache to expire + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); + } + assertThat(provider.fetchCount, is(2)); + } } diff --git a/plugins/lang-javascript/build.gradle b/plugins/lang-javascript/build.gradle index 938d7cc5e73..ead459f29d1 100644 --- a/plugins/lang-javascript/build.gradle +++ b/plugins/lang-javascript/build.gradle @@ -23,7 +23,7 @@ esplugin { } dependencies { - compile 'org.mozilla:rhino:1.7R4' + compile 'org.mozilla:rhino:1.7.7' } compileJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" diff --git a/plugins/lang-javascript/licenses/rhino-1.7.7.jar.sha1 b/plugins/lang-javascript/licenses/rhino-1.7.7.jar.sha1 new file mode 100644 index 00000000000..8c997d41c2b --- /dev/null +++ b/plugins/lang-javascript/licenses/rhino-1.7.7.jar.sha1 @@ -0,0 +1 @@ +3a9ea863b86126b0ed8f2fe2230412747cd3c254 \ No newline at end of file diff --git a/plugins/lang-javascript/licenses/rhino-1.7R4.jar.sha1 b/plugins/lang-javascript/licenses/rhino-1.7R4.jar.sha1 deleted file mode 100644 index 3432f18a5de..00000000000 --- a/plugins/lang-javascript/licenses/rhino-1.7R4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e982f2136574b9a423186fbaeaaa98dc3e5a5288 diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/plugin/javascript/JavaScriptPlugin.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/plugin/javascript/JavaScriptPlugin.java index 
a6832fe7afe..9ca36bd9f86 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/plugin/javascript/JavaScriptPlugin.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/plugin/javascript/JavaScriptPlugin.java @@ -28,6 +28,11 @@ import org.elasticsearch.script.javascript.JavaScriptScriptEngineService; */ public class JavaScriptPlugin extends Plugin { + static { + // install rhino policy on plugin init + JavaScriptScriptEngineService.init(); + } + @Override public String name() { return "lang-javascript"; diff --git a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java index 70fc5f46d63..621d338128f 100644 --- a/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java +++ b/plugins/lang-javascript/src/main/java/org/elasticsearch/script/javascript/JavaScriptScriptEngineService.java @@ -58,6 +58,34 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements private Scriptable globalScope; + // one time initialization of rhino security manager integration + private static final CodeSource DOMAIN; + static { + try { + DOMAIN = new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } + SecurityController.initGlobal(new PolicySecurityController() { + @Override + public GeneratedClassLoader createClassLoader(ClassLoader parent, Object securityDomain) { + // don't let scripts compile other scripts + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + // check the domain, this is all we allow + if (securityDomain != DOMAIN) { + throw new SecurityException("illegal securityDomain: " + securityDomain); + } + return 
super.createClassLoader(parent, securityDomain); + } + }); + } + + /** ensures this engine is initialized */ + public static void init() {} + @Inject public JavaScriptScriptEngineService(Settings settings) { super(settings); @@ -100,21 +128,11 @@ public class JavaScriptScriptEngineService extends AbstractComponent implements @Override public Object compile(String script) { - // we don't know why kind of safeguards rhino has, - // but just be safe - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkPermission(new SpecialPermission()); - } Context ctx = Context.enter(); try { ctx.setWrapFactory(wrapFactory); ctx.setOptimizationLevel(optimizationLevel); - ctx.setSecurityController(new PolicySecurityController()); - return ctx.compileString(script, generateScriptName(), 1, - new CodeSource(new URL("file:" + BootstrapInfo.UNTRUSTED_CODEBASE), (Certificate[]) null)); - } catch (MalformedURLException e) { - throw new RuntimeException(e); + return ctx.compileString(script, generateScriptName(), 1, DOMAIN); } finally { Context.exit(); } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java index 887317fb744..410099de0a7 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptSecurityTests.java @@ -23,8 +23,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; -import org.junit.After; -import org.junit.Before; import org.mozilla.javascript.WrappedException; import java.util.HashMap; @@ -37,14 +35,18 @@ public class JavaScriptSecurityTests extends ESTestCase { private JavaScriptScriptEngineService 
se; - @Before - public void setup() { + @Override + public void setUp() throws Exception { + super.setUp(); se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); + // otherwise will exit your VM and other bad stuff + assumeTrue("test requires security manager to be enabled", System.getSecurityManager() != null); } - @After - public void close() { + @Override + public void tearDown() throws Exception { se.close(); + super.tearDown(); } /** runs a script */ @@ -86,4 +88,13 @@ public class JavaScriptSecurityTests extends ESTestCase { // no files assertFailure("java.io.File.createTempFile(\"test\", \"tmp\")"); } + + public void testDefinitelyNotOK() { + // no mucking with security controller + assertFailure("var ctx = org.mozilla.javascript.Context.getCurrentContext(); " + + "ctx.setSecurityController(new org.mozilla.javascript.PolicySecurityController());"); + // no compiling scripts from scripts + assertFailure("var ctx = org.mozilla.javascript.Context.getCurrentContext(); " + + "ctx.compileString(\"1 + 1\", \"foobar\", 1, null); "); + } } diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java index fd60607e2e6..c4a80a41916 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonSecurityTests.java @@ -23,8 +23,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; -import org.junit.After; -import org.junit.Before; import org.python.core.PyException; import java.util.HashMap; @@ -37,17 +35,21 @@ public class PythonSecurityTests extends ESTestCase { private PythonScriptEngineService se; - @Before - public void setup() { + @Override + public void 
setUp() throws Exception { + super.setUp(); se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS); + // otherwise will exit your VM and other bad stuff + assumeTrue("test requires security manager to be enabled", System.getSecurityManager() != null); } - @After - public void close() { + @Override + public void tearDown() throws Exception { // We need to clear some system properties System.clearProperty("python.cachedir.skip"); System.clearProperty("python.console.encoding"); se.close(); + super.tearDown(); } /** runs a script */ diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 81b7e315b69..4752a3f80b2 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -193,6 +193,8 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent