diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 9f54ae8b682..5da5912dabe 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -222,7 +222,11 @@ class BuildPlugin implements Plugin { // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with return Jvm.current().javaHome } else { - throw new GradleException("JAVA_HOME must be set to build Elasticsearch") + throw new GradleException( + "JAVA_HOME must be set to build Elasticsearch. " + + "Note that if the variable was just set you might have to run `./gradlew --stop` for " + + "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 for details." + ) } } return javaHome diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index d61fccb9371..2890257b236 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -40,7 +40,7 @@ import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments; public class RankEvalIT extends ESRestHighLevelClientTestCase { @@ -84,7 +84,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { Map partialResults = response.getPartialResults(); assertEquals(2, partialResults.size()); EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query"); - assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size()); + assertEquals(2, 
filterUnratedDocuments(amsterdamQueryQuality.getHitsAndRatings()).size()); List hitsAndRatings = amsterdamQueryQuality.getHitsAndRatings(); assertEquals(7, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { @@ -96,7 +96,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { } } EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query"); - assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size()); + assertEquals(6, filterUnratedDocuments(berlinQueryQuality.getHitsAndRatings()).size()); hitsAndRatings = berlinQueryQuality.getHitsAndRatings(); assertEquals(7, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 5acc6f5552f..64a344790ca 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -724,8 +724,8 @@ public class RestHighLevelClientTests extends ESTestCase { assertEquals(0, method.getExceptionTypes().length); assertEquals(3, method.getParameterTypes().length); assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); - assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); - assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName())); + assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class)); + assertThat(method.getParameterTypes()[2], equalTo(ActionListener.class)); } else { //A few methods return a boolean rather than a response object if (apiName.equals("ping") || apiName.contains("exist")) { @@ -738,18 +738,23 @@ public class RestHighLevelClientTests extends ESTestCase { //a few methods don't accept a request 
object as argument if (apiName.equals("ping") || apiName.equals("info")) { assertEquals(1, method.getParameterTypes().length); - assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class)); } else { assertEquals(apiName, 2, method.getParameterTypes().length); assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); - assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class)); } boolean remove = apiSpec.remove(apiName); - if (remove == false && deprecatedMethods.contains(apiName) == false) { - //TODO xpack api are currently ignored, we need to load xpack yaml spec too - if (apiName.startsWith("xpack.") == false) { - apiNotFound.add(apiName); + if (remove == false) { + if (deprecatedMethods.contains(apiName)) { + assertTrue("method [" + method.getName() + "], api [" + apiName + "] should be deprecated", + method.isAnnotationPresent(Deprecated.class)); + } else { + //TODO xpack api are currently ignored, we need to load xpack yaml spec too + if (apiName.startsWith("xpack.") == false) { + apiNotFound.add(apiName); + } } } } diff --git a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java index 7f9bcc6ea08..7bde7fbc06f 100644 --- a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java +++ b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport.client; import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; + import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.SuppressForbidden; import 
org.elasticsearch.common.network.NetworkModule; diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 3c6780cba84..0269e4399ce 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -49,7 +49,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os return copySpec { into("elasticsearch-${version}") { into('lib') { - with libFiles + with libFiles(oss) } into('config') { dirMode 0750 diff --git a/distribution/build.gradle b/distribution/build.gradle index 6ffb678cb2b..675799c5b22 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -227,16 +227,24 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { /***************************************************************************** * Common files in all distributions * *****************************************************************************/ - libFiles = copySpec { - // delay by using closures, since they have not yet been configured, so no jar task exists yet - from { project(':server').jar } - from { project(':server').configurations.runtime } - from { project(':libs:plugin-classloader').jar } - from { project(':distribution:tools:java-version-checker').jar } - from { project(':distribution:tools:launchers').jar } - into('tools/plugin-cli') { - from { project(':distribution:tools:plugin-cli').jar } - from { project(':distribution:tools:plugin-cli').configurations.runtime } + libFiles = { oss -> + copySpec { + // delay by using closures, since they have not yet been configured, so no jar task exists yet + from { project(':server').jar } + from { project(':server').configurations.runtime } + from { project(':libs:plugin-classloader').jar } + from { project(':distribution:tools:java-version-checker').jar } + from { project(':distribution:tools:launchers').jar } + into('tools/plugin-cli') { + from { project(':distribution:tools:plugin-cli').jar } + from { 
project(':distribution:tools:plugin-cli').configurations.runtime } + } + if (oss == false) { + into('tools/security-cli') { + from { project(':x-pack:plugin:security:cli').jar } + from { project(':x-pack:plugin:security:cli').configurations.compile } + } + } } } diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 04fa6313c0a..fcd69138da3 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -126,7 +126,7 @@ Closure commonPackageConfig(String type, boolean oss) { } into('lib') { with copySpec { - with libFiles + with libFiles(oss) // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine eachFile { FileCopyDetails fcp -> String[] segments = fcp.relativePath.segments diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 32516d07bef..688b36042ea 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -26,14 +26,14 @@ include::install_remove.asciidoc[] | `field` | yes | - | The field to get the ip address from for the geographical lookup. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. | `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb, GeoLite2-Country.mmdb and GeoLite2-ASN.mmdb files. -| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. +| `properties` | no | [`continent_name`, `country_iso_code`, `region_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. 
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== *Depends on what is available in `database_field`: * If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`, -`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` +`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`. * If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 3991b738e13..79277d22e81 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1732,6 +1732,10 @@ For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED` | `include_keys` | no | `null` | List of keys to filter and insert into document. 
Defaults to including all keys | `exclude_keys` | no | `null` | List of keys to exclude from document | `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document +| `prefix` | no | `null` | Prefix to be added to extracted keys +| `trim_key` | no | `null` | String of characters to trim from extracted keys +| `trim_value` | no | `null` | String of characters to trim from extracted values +| `strip_brackets` | no | `false` | If `true` strip brackets `()`, `<>`, `[]` as well as quotes `'` and `"` from extracted values |====== diff --git a/docs/reference/mapping/types/alias.asciidoc b/docs/reference/mapping/types/alias.asciidoc index d2b5ccdce8a..b4243d80e6d 100644 --- a/docs/reference/mapping/types/alias.asciidoc +++ b/docs/reference/mapping/types/alias.asciidoc @@ -74,7 +74,7 @@ field alias to query over multiple target fields in a single clause. ==== Unsupported APIs Writes to field aliases are not supported: attempting to use an alias in an index or update request -will result in a failure. Likewise, aliases cannot be used as the target of `copy_to`. +will result in a failure. Likewise, aliases cannot be used as the target of `copy_to` or in multi-fields. Because alias names are not present in the document source, aliases cannot be used when performing source filtering. 
For example, the following request will return an empty result for `_source`: diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index f7b6f9b2e00..689b941ef6b 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -79,3 +79,11 @@ the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in ==== The deprecated stored script contexts have now been removed When putting stored scripts, support for storing them with the deprecated `template` context or without a context is now removed. Scripts must be stored using the `script` context as mentioned in the documentation. + +==== Get Aliases API limitations when {security} is enabled removed + +The behavior and response codes of the get aliases API no longer vary +depending on whether {security} is enabled. Previously a +404 - NOT FOUND (IndexNotFoundException) could be returned in case the +current user was not authorized for any alias. An empty response with +status 200 - OK is now returned instead at all times. diff --git a/docs/reference/search/rank-eval.asciidoc b/docs/reference/search/rank-eval.asciidoc index 571a4886991..cf13b9f7b06 100644 --- a/docs/reference/search/rank-eval.asciidoc +++ b/docs/reference/search/rank-eval.asciidoc @@ -274,7 +274,7 @@ that shows potential errors of individual queries. The response has the followin "details": { "my_query_id1": { <2> "quality_level": 0.6, <3> - "unknown_docs": [ <4> + "unrated_docs": [ <4> { "_index": "my_index", "_id": "1960795" @@ -309,7 +309,7 @@ that shows potential errors of individual queries. 
The response has the followin <1> the overall evaluation quality calculated by the defined metric <2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id <3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score -<4> the `unknown_docs` section contains an `_index` and `_id` entry for each document in the search result for this +<4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for this query that didn't have a ratings value. This can be used to ask the user to supply ratings for these documents <5> the `hits` section shows a grouping of the search results with their supplied rating <6> the `metric_details` give additional information about the calculated quality metric (e.g. how many of the retrieved diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc index 5c812b6f53c..2759944e615 100644 --- a/docs/reference/settings/monitoring-settings.asciidoc +++ b/docs/reference/settings/monitoring-settings.asciidoc @@ -85,10 +85,6 @@ You can update this setting through the Sets the timeout for collecting index statistics. Defaults to `10s`. -`xpack.monitoring.collection.indices.stats.timeout`:: - -Sets the timeout for collecting total indices statistics. Defaults to `10s`. - `xpack.monitoring.collection.index.recovery.active_only`:: Controls whether or not all recoveries are collected. Set to `true` to diff --git a/docs/reference/upgrade/cluster_restart.asciidoc b/docs/reference/upgrade/cluster_restart.asciidoc index b092e45ae29..06d5e96f8ef 100644 --- a/docs/reference/upgrade/cluster_restart.asciidoc +++ b/docs/reference/upgrade/cluster_restart.asciidoc @@ -47,6 +47,8 @@ include::set-paths-tip.asciidoc[] Use the `elasticsearch-plugin` script to install the upgraded version of each installed Elasticsearch plugin. 
All plugins must be upgraded when you upgrade a node. ++ +include::remove-xpack.asciidoc[] . *Start each upgraded node.* + @@ -91,7 +93,7 @@ already have local shard copies. + -- When all nodes have joined the cluster and recovered their primary shards, -reenable allocation by restoring `cluster.routing.allocation.enable` to its +reenable allocation by restoring `cluster.routing.allocation.enable` to its default: [source,js] @@ -123,4 +125,4 @@ GET _cat/recovery // CONSOLE -- -. *Restart machine learning jobs.* +. *Restart machine learning jobs.* diff --git a/docs/reference/upgrade/remove-xpack.asciidoc b/docs/reference/upgrade/remove-xpack.asciidoc new file mode 100644 index 00000000000..9d4c4c9f779 --- /dev/null +++ b/docs/reference/upgrade/remove-xpack.asciidoc @@ -0,0 +1,4 @@ +IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3, +remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As +of 6.3, {xpack} is included in the default distribution. The node will fail to +start if the old {xpack} plugin is present. diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index 76a10f752be..e2edb6b2922 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -53,6 +53,8 @@ include::set-paths-tip.asciidoc[] Use the `elasticsearch-plugin` script to install the upgraded version of each installed Elasticsearch plugin. All plugins must be upgraded when you upgrade a node. ++ +include::remove-xpack.asciidoc[] . *Start the upgraded node.* + @@ -144,7 +146,7 @@ for each node that needs to be updated. -- -. *Restart machine learning jobs.* +. 
*Restart machine learning jobs.* [IMPORTANT] ==================================================== diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java index 8ff0cfcd0c8..af4eabefd94 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/ChannelFactoryTests.java @@ -137,7 +137,6 @@ public class ChannelFactoryTests extends ESTestCase { super(rawChannelFactory); } - @SuppressWarnings("unchecked") @Override public NioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { NioSocketChannel nioSocketChannel = new NioSocketChannel(channel); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java index 0cc3aa04800..6e1e34ec1f5 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/EventHandlerTests.java @@ -120,7 +120,6 @@ public class EventHandlerTests extends ESTestCase { verify(channelFactory, times(2)).acceptNioChannel(same(serverContext), same(selectorSupplier)); } - @SuppressWarnings("unchecked") public void testHandleAcceptCallsServerAcceptCallback() throws IOException { NioSocketChannel childChannel = new NioSocketChannel(mock(SocketChannel.class)); SocketChannelContext childContext = mock(SocketChannelContext.class); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java index bc9a7c33f0f..9dbf483107b 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java @@ -275,7 +275,6 @@ public class SocketChannelContextTests extends ESTestCase { } } - 
@SuppressWarnings("unchecked") public void testCloseClosesChannelBuffer() throws IOException { try (SocketChannel realChannel = SocketChannel.open()) { when(channel.getRawChannel()).thenReturn(realChannel); diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java index 70e2172ce92..b5a348f45eb 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MultiPassStats.java @@ -43,7 +43,6 @@ class MultiPassStats { this.fieldBKey = fieldBName; } - @SuppressWarnings("unchecked") void computeStats(final List fieldA, final List fieldB) { // set count count = fieldA.size(); diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index 424c1197da3..4f35bbee28d 100644 --- a/modules/ingest-common/build.gradle +++ b/modules/ingest-common/build.gradle @@ -20,11 +20,17 @@ esplugin { description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources' classname 'org.elasticsearch.ingest.common.IngestCommonPlugin' + extendedPlugins = ['lang-painless'] } dependencies { + compileOnly project(':modules:lang-painless') compile project(':libs:grok') } compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes" compileTestJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes" + +integTestCluster { + module project(':modules:lang-painless') +} \ No newline at end of file diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java index dfe9a054acf..d07b56e1b3d 100644 --- 
a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/BytesProcessor.java @@ -35,9 +35,13 @@ public final class BytesProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } + public static long apply(String value) { + return ByteSizeValue.parseBytesSizeValue(value, null, "Ingest Field").getBytes(); + } + @Override protected Long process(String value) { - return ByteSizeValue.parseBytesSizeValue(value, null, getField()).getBytes(); + return apply(value); } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index 2f217735df2..c0a9d37abda 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -67,13 +67,11 @@ public final class JsonProcessor extends AbstractProcessor { return addToRoot; } - @Override - public void execute(IngestDocument document) throws Exception { - Object fieldValue = document.getFieldValue(field, Object.class); - BytesReference bytesRef = (fieldValue == null) ? new BytesArray("null") : new BytesArray(fieldValue.toString()); + public static Object apply(Object fieldValue) { + BytesReference bytesRef = fieldValue == null ? 
new BytesArray("null") : new BytesArray(fieldValue.toString()); try (InputStream stream = bytesRef.streamInput(); XContentParser parser = JsonXContent.jsonXContent - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) { XContentParser.Token token = parser.nextToken(); Object value = null; if (token == XContentParser.Token.VALUE_NULL) { @@ -91,20 +89,32 @@ public final class JsonProcessor extends AbstractProcessor { } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { throw new IllegalArgumentException("cannot read binary value"); } - if (addToRoot && (value instanceof Map)) { - for (Map.Entry entry : ((Map) value).entrySet()) { - document.setFieldValue(entry.getKey(), entry.getValue()); - } - } else if (addToRoot) { - throw new IllegalArgumentException("cannot add non-map fields to root of document"); - } else { - document.setFieldValue(targetField, value); - } + return value; } catch (IOException e) { throw new IllegalArgumentException(e); } } + public static void apply(Map ctx, String fieldName) { + Object value = apply(ctx.get(fieldName)); + if (value instanceof Map) { + @SuppressWarnings("unchecked") + Map map = (Map) value; + ctx.putAll(map); + } else { + throw new IllegalArgumentException("cannot add non-map fields to root of document"); + } + } + + @Override + public void execute(IngestDocument document) throws Exception { + if (addToRoot) { + apply(document.getSourceAndMetadata(), field); + } else { + document.setFieldValue(targetField, apply(document.getFieldValue(field, Object.class))); + } + } + @Override public String getType() { return TYPE; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java index 6ed065926d6..9cce3cedf3d 100644 --- 
a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/KeyValueProcessor.java @@ -25,11 +25,14 @@ import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.regex.Pattern; /** * The KeyValueProcessor parses and extracts messages of the `key=value` variety into fields with values of the keys. @@ -38,6 +41,8 @@ public final class KeyValueProcessor extends AbstractProcessor { public static final String TYPE = "kv"; + private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)"); + private final String field; private final String fieldSplit; private final String valueSplit; @@ -45,9 +50,11 @@ public final class KeyValueProcessor extends AbstractProcessor { private final Set excludeKeys; private final String targetField; private final boolean ignoreMissing; + private final Consumer execution; KeyValueProcessor(String tag, String field, String fieldSplit, String valueSplit, Set includeKeys, - Set excludeKeys, String targetField, boolean ignoreMissing) { + Set excludeKeys, String targetField, boolean ignoreMissing, + String trimKey, String trimValue, boolean stripBrackets, String prefix) { super(tag); this.field = field; this.targetField = targetField; @@ -56,6 +63,92 @@ public final class KeyValueProcessor extends AbstractProcessor { this.includeKeys = includeKeys; this.excludeKeys = excludeKeys; this.ignoreMissing = ignoreMissing; + this.execution = buildExecution( + fieldSplit, valueSplit, field, includeKeys, excludeKeys, targetField, ignoreMissing, trimKey, trimValue, + stripBrackets, prefix + 
); + } + + private static Consumer buildExecution(String fieldSplit, String valueSplit, String field, + Set includeKeys, Set excludeKeys, + String targetField, boolean ignoreMissing, + String trimKey, String trimValue, boolean stripBrackets, + String prefix) { + final Predicate keyFilter; + if (includeKeys == null) { + if (excludeKeys == null) { + keyFilter = key -> true; + } else { + keyFilter = key -> excludeKeys.contains(key) == false; + } + } else { + if (excludeKeys == null) { + keyFilter = includeKeys::contains; + } else { + keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false; + } + } + final String fieldPathPrefix; + String keyPrefix = prefix == null ? "" : prefix; + if (targetField == null) { + fieldPathPrefix = keyPrefix; + } else { + fieldPathPrefix = targetField + "." + keyPrefix; + } + final Function keyPrefixer; + if (fieldPathPrefix.isEmpty()) { + keyPrefixer = val -> val; + } else { + keyPrefixer = val -> fieldPathPrefix + val; + } + final Function fieldSplitter = buildSplitter(fieldSplit, true); + Function valueSplitter = buildSplitter(valueSplit, false); + final Function keyTrimmer = buildTrimmer(trimKey); + final Function bracketStrip; + if (stripBrackets) { + bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll(""); + } else { + bracketStrip = val -> val; + } + final Function valueTrimmer = buildTrimmer(trimValue); + return document -> { + String value = document.getFieldValue(field, String.class, ignoreMissing); + if (value == null) { + if (ignoreMissing) { + return; + } + throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs."); + } + for (String part : fieldSplitter.apply(value)) { + String[] kv = valueSplitter.apply(part); + if (kv.length != 2) { + throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]"); + } + String key = keyTrimmer.apply(kv[0]); + if (keyFilter.test(key)) { + append(document, 
keyPrefixer.apply(key), valueTrimmer.apply(bracketStrip.apply(kv[1]))); + } + } + }; + } + + private static Function buildTrimmer(String trim) { + if (trim == null) { + return val -> val; + } else { + Pattern pattern = Pattern.compile("(^([" + trim + "]+))|([" + trim + "]+$)"); + return val -> pattern.matcher(val).replaceAll(""); + } + } + + private static Function buildSplitter(String split, boolean fields) { + int limit = fields ? 0 : 2; + if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') { + Pattern splitPattern = Pattern.compile(split); + return val -> splitPattern.split(val, limit); + } else { + return val -> val.split(split, limit); + } } String getField() { @@ -86,7 +179,7 @@ public final class KeyValueProcessor extends AbstractProcessor { return ignoreMissing; } - public void append(IngestDocument document, String targetField, String value) { + private static void append(IngestDocument document, String targetField, String value) { if (document.hasField(targetField)) { document.appendFieldValue(targetField, value); } else { @@ -96,27 +189,7 @@ public final class KeyValueProcessor extends AbstractProcessor { @Override public void execute(IngestDocument document) { - String oldVal = document.getFieldValue(field, String.class, ignoreMissing); - - if (oldVal == null && ignoreMissing) { - return; - } else if (oldVal == null) { - throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs."); - } - - String fieldPathPrefix = (targetField == null) ? 
"" : targetField + "."; - Arrays.stream(oldVal.split(fieldSplit)) - .map((f) -> { - String[] kv = f.split(valueSplit, 2); - if (kv.length != 2) { - throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]"); - } - return kv; - }) - .filter((p) -> - (includeKeys == null || includeKeys.contains(p[0])) && - (excludeKeys == null || excludeKeys.contains(p[0]) == false)) - .forEach((p) -> append(document, fieldPathPrefix + p[0], p[1])); + execution.accept(document); } @Override @@ -132,6 +205,11 @@ public final class KeyValueProcessor extends AbstractProcessor { String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field"); String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split"); String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split"); + String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key"); + String trimValue = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_value"); + String prefix = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "prefix"); + boolean stripBrackets = + ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false); Set includeKeys = null; Set excludeKeys = null; List includeKeysList = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys"); @@ -143,7 +221,10 @@ public final class KeyValueProcessor extends AbstractProcessor { excludeKeys = Collections.unmodifiableSet(Sets.newHashSet(excludeKeysList)); } boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new KeyValueProcessor(processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing); + return new KeyValueProcessor( + processorTag, field, 
fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, + trimKey, trimValue, stripBrackets, prefix + ); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java index aef8b0cce24..4269cb05257 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java @@ -35,9 +35,13 @@ public final class LowercaseProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } + public static String apply(String value) { + return value.toLowerCase(Locale.ROOT); + } + @Override protected String process(String value) { - return value.toLowerCase(Locale.ROOT); + return apply(value); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java similarity index 52% rename from server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java rename to modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java index 0f686efe926..8a0b1529892 100644 --- a/server/src/main/java/org/elasticsearch/index/settings/IndexDynamicSettings.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/Processors.java @@ -17,23 +17,33 @@ * under the License. 
*/ -package org.elasticsearch.index.settings; +package org.elasticsearch.ingest.common; -import org.elasticsearch.common.inject.BindingAnnotation; +import java.util.Map; -import java.lang.annotation.Documented; -import java.lang.annotation.Retention; -import java.lang.annotation.Target; +public final class Processors { -import static java.lang.annotation.ElementType.FIELD; -import static java.lang.annotation.ElementType.PARAMETER; -import static java.lang.annotation.RetentionPolicy.RUNTIME; + public static long bytes(String value) { + return BytesProcessor.apply(value); + } + public static String lowercase(String value) { + return LowercaseProcessor.apply(value); + } -@BindingAnnotation -@Target({FIELD, PARAMETER}) -@Retention(RUNTIME) -@Documented -public @interface IndexDynamicSettings { + public static String uppercase(String value) { + return UppercaseProcessor.apply(value); + } + public static Object json(Object fieldValue) { + return JsonProcessor.apply(fieldValue); + } + + public static void json(Map ctx, String field) { + JsonProcessor.apply(ctx, field); + } + + public static String urlDecode(String value) { + return URLDecodeProcessor.apply(value); + } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java new file mode 100644 index 00000000000..ced84057c7a --- /dev/null +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ProcessorsWhitelistExtension.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.painless.spi.PainlessExtension; +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.script.IngestScript; +import org.elasticsearch.script.ScriptContext; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class ProcessorsWhitelistExtension implements PainlessExtension { + + private static final Whitelist WHITELIST = + WhitelistLoader.loadFromResourceFiles(ProcessorsWhitelistExtension.class, "processors_whitelist.txt"); + + @Override + public Map, List> getContextWhitelists() { + return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST)); + } +} diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java index 945419499ad..fb6c5acf98b 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/URLDecodeProcessor.java @@ -34,15 +34,19 @@ public final class URLDecodeProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } - @Override - protected String process(String value) { + public static String apply(String value) { try { return URLDecoder.decode(value, "UTF-8"); } catch (UnsupportedEncodingException e) { - throw new 
IllegalArgumentException("could not URL-decode field[" + getField() + "]", e); + throw new IllegalArgumentException("Could not URL-decode value.", e); } } + @Override + protected String process(String value) { + return apply(value); + } + @Override public String getType() { return TYPE; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java index af93f06a8f2..6c428627c7d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java @@ -34,9 +34,13 @@ public final class UppercaseProcessor extends AbstractStringProcessor { super(processorTag, field, ignoreMissing, targetField); } + public static String apply(String value) { + return value.toUpperCase(Locale.ROOT); + } + @Override protected String process(String value) { - return value.toUpperCase(Locale.ROOT); + return apply(value); } @Override diff --git a/modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension b/modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension new file mode 100644 index 00000000000..8a98f034be5 --- /dev/null +++ b/modules/ingest-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension @@ -0,0 +1 @@ +org.elasticsearch.ingest.common.ProcessorsWhitelistExtension \ No newline at end of file diff --git a/modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt b/modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt new file mode 100644 index 00000000000..3d93b19f066 --- /dev/null +++ b/modules/ingest-common/src/main/resources/org/elasticsearch/ingest/common/processors_whitelist.txt @@ -0,0 +1,29 
@@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# This file contains a whitelist of static processor methods that can be accessed from painless + +class org.elasticsearch.ingest.common.Processors { + long bytes(String) + String lowercase(String) + String uppercase(String) + Object json(Object) + void json(Map, String) + String urlDecode(String) +} \ No newline at end of file diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java index 0da3434adf1..788340a455a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/BytesProcessorTests.java @@ -63,7 +63,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { Processor processor = newProcessor(fieldName, randomBoolean(), fieldName); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), - CoreMatchers.equalTo("failed to parse setting [" + fieldName + "] with value [8912pb] as a size in bytes")); + 
CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes")); assertThat(exception.getCause().getMessage(), CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported")); } @@ -93,6 +93,6 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase { processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L)); assertWarnings("Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " + - "[" + fieldName + "]"); + "[Ingest Field]"); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 2867ed1d240..099e8e1866b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -146,7 +146,6 @@ public class JsonProcessorTests extends ESTestCase { assertThat(exception.getMessage(), equalTo("field [field] not present as part of path [field]")); } - @SuppressWarnings("unchecked") public void testAddToRoot() throws Exception { String processorTag = randomAlphaOfLength(3); String randomTargetField = randomAlphaOfLength(2); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java index 380af44c251..591f9994c60 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/KeyValueProcessorTests.java @@ -25,19 +25,25 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; 
import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import java.util.Set; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; public class KeyValueProcessorTests extends ESTestCase { + private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory(); + public void test() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); @@ -46,7 +52,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testRootTarget() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "myField", "&", "=", null, null,null, false); + Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello")); assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe"))); @@ -55,7 +61,7 @@ public class 
KeyValueProcessorTests extends ESTestCase { public void testKeySameAsSourceField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); ingestDocument.setFieldValue("first", "first=hello"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "first", "&", "=", null, null,null, false); + Processor processor = createKvProcessor("first", "&", "=", null, null,null, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello"))); } @@ -63,7 +69,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testIncludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", + Processor processor = createKvProcessor(fieldName, "&", "=", Sets.newHashSet("first"), null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); @@ -73,7 +79,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testExcludeKeys() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", + Processor processor = createKvProcessor(fieldName, "&", "=", null, Sets.newHashSet("second"), "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); @@ -84,7 +90,7 @@ public class KeyValueProcessorTests 
extends ESTestCase { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe&third=bar"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", + Processor processor = createKvProcessor(fieldName, "&", "=", Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false); processor.execute(ingestDocument); assertFalse(ingestDocument.hasField("target.first")); @@ -92,9 +98,9 @@ public class KeyValueProcessorTests extends ESTestCase { assertFalse(ingestDocument.hasField("target.third")); } - public void testMissingField() { + public void testMissingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "&", + Processor processor = createKvProcessor("unknown", "&", "=", null, null, "target", false); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]")); @@ -105,7 +111,7 @@ public class KeyValueProcessorTests extends ESTestCase { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap(fieldName, null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "", "", null, null, "target", true); + Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -113,7 +119,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void 
testNonExistentWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "", "", null, null, "target", true); + Processor processor = createKvProcessor("unknown", "", "", null, null, "target", true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); } @@ -121,7 +127,7 @@ public class KeyValueProcessorTests extends ESTestCase { public void testFailFieldSplitMatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe"); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe")); assertFalse(ingestDocument.hasField("target.second")); @@ -129,8 +135,94 @@ public class KeyValueProcessorTests extends ESTestCase { public void testFailValueSplitMatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar")); - Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "foo", "&", "=", null, null, "target", false); + Processor processor = createKvProcessor("foo", "&", "=", null, null, "target", false); Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]")); } + + 
public void testTrimKeyAndValue() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first= hello &second=world& second =universe"); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, " ", " ", false, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); + } + + public void testTrimMultiCharSequence() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, + "to=, orig_to=, %+relay=mail.example.com[private/dovecot-lmtp]," + + " delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent " + ); + Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.to", String.class), equalTo("foo@example.com")); + assertThat(ingestDocument.getFieldValue("target.orig_to", String.class), equalTo("bar@example.com")); + assertThat(ingestDocument.getFieldValue("target.relay", String.class), equalTo("mail.example.com[private/dovecot-lmtp]")); + assertThat(ingestDocument.getFieldValue("target.delay", String.class), equalTo("2.2")); + assertThat(ingestDocument.getFieldValue("target.delays", String.class), equalTo("1.9/0.01/0.01/0.21")); + assertThat(ingestDocument.getFieldValue("target.dsn", String.class), equalTo("2.0.0")); + assertThat(ingestDocument.getFieldValue("target.status", String.class), equalTo("sent")); + } + + public void testStripBrackets() throws Exception { + IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField( + random(), ingestDocument, "first=&second=\"world\"&second=(universe)&third=&fourth=[bar]&fifth='last'" + ); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe"))); + assertThat(ingestDocument.getFieldValue("target.third", String.class), equalTo("foo")); + assertThat(ingestDocument.getFieldValue("target.fourth", String.class), equalTo("bar")); + assertThat(ingestDocument.getFieldValue("target.fifth", String.class), equalTo("last")); + } + + public void testAddPrefix() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe"); + Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, false, "arg_"); + processor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target.arg_first", String.class), equalTo("hello")); + assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe"))); + } + + private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set includeKeys, + Set excludeKeys, String targetField, + boolean ignoreMissing) throws Exception { + return createKvProcessor( + field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null + ); + } + + private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set includeKeys, + Set excludeKeys, 
String targetField, boolean ignoreMissing, + String trimKey, String trimValue, boolean stripBrackets, + String prefix) throws Exception { + Map config = new HashMap<>(); + config.put("field", field); + config.put("field_split", fieldSplit); + config.put("value_split", valueSplit); + config.put("target_field", targetField); + if (includeKeys != null) { + config.put("include_keys", new ArrayList<>(includeKeys)); + } + if (excludeKeys != null) { + config.put("exclude_keys", new ArrayList<>(excludeKeys)); + } + config.put("ignore_missing", ignoreMissing); + if (trimKey != null) { + config.put("trim_key", trimKey); + } + if (trimValue != null) { + config.put("trim_value", trimValue); + } + config.put("strip_brackets", stripBrackets); + if (prefix != null) { + config.put("prefix", prefix); + } + return FACTORY.create(null, randomAlphaOfLength(10), config); + } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml new file mode 100644 index 00000000000..bd55b764a95 --- /dev/null +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/190_script_processor.yml @@ -0,0 +1,216 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test invoke bytes processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.bytes(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "1kb"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "1kb" } + - match: { _source.target_field: 1024 } + +--- +"Test invoke lowercase processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > 
+ { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.lowercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "FooBar"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "FooBar" } + - match: { _source.target_field: "foobar" } + +--- +"Test invoke uppercase processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "FooBar"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "FooBar" } + - match: { _source.target_field: "FOOBAR" } + +--- +"Test invoke json processor, assign to field": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.json(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "{\"foo\":\"bar\"}"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "{\"foo\":\"bar\"}" } + - match: { _source.target_field.foo: "bar" } + +--- +"Test invoke json processor, assign to root": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "Processors.json(ctx, 'source_field')" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + 
index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "{\"foo\":\"bar\"}"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "{\"foo\":\"bar\"}" } + - match: { _source.foo: "bar" } + +--- +"Test invoke urlDecode processor": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.urlDecode(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "foo%20bar"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.source_field: "foo%20bar" } + - match: { _source.target_field: "foo bar" } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 6495659d9cd..8491d15c27e 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -174,4 +174,4 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Term { class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc { int getLength() float getFreq() -} +} \ No newline at end of file diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java index 5d881632dee..d0d0b2165ca 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/InitializerTests.java @@ -26,7 +26,7 @@ import java.util.Map; public class InitializerTests extends ScriptTestCase { - 
@SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testArrayInitializers() { int[] ints = (int[])exec("new int[] {}"); @@ -59,7 +59,7 @@ public class InitializerTests extends ScriptTestCase { assertEquals("aaaaaa", objects[3]); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testListInitializers() { List list = (List)exec("[]"); @@ -91,7 +91,7 @@ public class InitializerTests extends ScriptTestCase { assertEquals("aaaaaa", list.get(3)); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"rawtypes"}) public void testMapInitializers() { Map map = (Map)exec("[:]"); diff --git a/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json b/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json index 0129509b5c0..fbaaf92351c 100644 --- a/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json +++ b/modules/mapper-extras/src/test/resources/org/elasticsearch/index/mapper/filebeat-6.0.template.json @@ -1,7 +1,5 @@ { - "index_patterns": [ - "filebeat-6.0.0-*" - ], + "index_patterns": ["filebeat-6.0.0-*"], "mappings": { "doc": { "_meta": { @@ -67,12 +65,14 @@ "type": "keyword" }, "country_iso_code": { - "ignore_above": 1024, "type": "keyword" }, "location": { "type": "geo_point" }, + "region_iso_code": { + "type": "keyword" + }, "region_name": { "ignore_above": 1024, "type": "keyword" diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java index 2ad3e589bd8..91ba1ce6169 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvalQueryQuality.java @@ -102,8 +102,8 @@ public class EvalQueryQuality 
implements ToXContentFragment, Writeable { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(queryId); builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult); - builder.startArray(UNKNOWN_DOCS_FIELD.getPreferredName()); - for (DocumentKey key : EvaluationMetric.filterUnknownDocuments(ratedHits)) { + builder.startArray(UNRATED_DOCS_FIELD.getPreferredName()); + for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) { builder.startObject(); builder.field(RatedDocument.INDEX_FIELD.getPreferredName(), key.getIndex()); builder.field(RatedDocument.DOC_ID_FIELD.getPreferredName(), key.getDocId()); @@ -123,7 +123,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable { } private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level"); - private static final ParseField UNKNOWN_DOCS_FIELD = new ParseField("unknown_docs"); + private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs"); private static final ParseField HITS_FIELD = new ParseField("hits"); private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details"); private static final ObjectParser PARSER = new ObjectParser<>("eval_query_quality", diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java index c67511e051f..37898fd9516 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/EvaluationMetric.java @@ -76,10 +76,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable { /** * filter @link {@link RatedSearchHit} that don't have a rating */ - static List filterUnknownDocuments(List ratedHits) { - List unknownDocs = ratedHits.stream().filter(hit -> 
hit.getRating().isPresent() == false) + static List filterUnratedDocuments(List ratedHits) { + return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false) .map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())).collect(Collectors.toList()); - return unknownDocs; } /** diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index 56b0c692c41..e768c297333 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -40,7 +40,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.hamcrest.CoreMatchers.containsString; @@ -128,7 +128,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, rated); assertEquals(12.779642067948913, result.getQualityLevel(), DELTA); - assertEquals(2, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size()); /** * Check with normalization: to get the maximal possible dcg, sort documents by @@ -185,7 +185,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); 
assertEquals(12.392789260714371, result.getQualityLevel(), DELTA); - assertEquals(1, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size()); /** * Check with normalization: to get the maximal possible dcg, sort documents by @@ -224,13 +224,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase { DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs); assertEquals(0.0d, result.getQualityLevel(), DELTA); - assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size()); // also check normalized dcg = new DiscountedCumulativeGain(true, null, 10); result = dcg.evaluate("id", hits, ratedDocs); assertEquals(0.0d, result.getQualityLevel(), DELTA); - assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size()); + assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size()); } public void testParseFromXContent() throws IOException { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java index e9fae6b5c63..c9251bb8090 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; -import org.elasticsearch.index.rankeval.RatedDocument.DocumentKey; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; @@ -52,11 +51,6 @@ public class EvalQueryQualityTests extends 
ESTestCase { } public static EvalQueryQuality randomEvalQueryQuality() { - List unknownDocs = new ArrayList<>(); - int numberOfUnknownDocs = randomInt(5); - for (int i = 0; i < numberOfUnknownDocs; i++) { - unknownDocs.add(new DocumentKey(randomAlphaOfLength(10), randomAlphaOfLength(10))); - } int numberOfSearchHits = randomInt(5); List ratedHits = new ArrayList<>(); for (int i = 0; i < numberOfSearchHits; i++) { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index b55c57bae2b..28200e7d5a0 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -40,7 +40,7 @@ import java.util.List; import java.util.Map.Entry; import java.util.Set; -import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.instanceOf; @@ -120,7 +120,7 @@ public class RankEvalRequestIT extends ESIntegTestCase { for (Entry entry : entrySet) { EvalQueryQuality quality = entry.getValue(); if (entry.getKey() == "amsterdam_query") { - assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size()); + assertEquals(2, filterUnratedDocuments(quality.getHitsAndRatings()).size()); List hitsAndRatings = quality.getHitsAndRatings(); assertEquals(6, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { @@ -133,7 +133,7 @@ public class RankEvalRequestIT extends ESIntegTestCase { } } if (entry.getKey() == "berlin_query") { - assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size()); + assertEquals(5, 
filterUnratedDocuments(quality.getHitsAndRatings()).size()); List hitsAndRatings = quality.getHitsAndRatings(); assertEquals(6, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index e4fe4848237..1e94e869d25 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -158,7 +158,7 @@ public class RankEvalResponseTests extends ESTestCase { " \"details\": {" + " \"coffee_query\": {" + " \"quality_level\": 0.1," + - " \"unknown_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + + " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," + " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," + " \"rating\":5}," + " {\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"456\",\"_score\":1.0}," + diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml index 3900b1f32ba..62c246fb320 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml @@ -73,7 +73,7 @@ setup: - match: { quality_level: 1} - match: { details.amsterdam_query.quality_level: 1.0} - - match: { details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]} + - match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]} - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}} - length: { details.amsterdam_query.hits: 3} @@ -85,7 +85,7 @@ setup: - is_false: 
details.amsterdam_query.hits.2.rating - match: { details.berlin_query.quality_level: 1.0} - - match: { details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]} + - match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]} - match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}} - length: { details.berlin_query.hits: 2} - match: { details.berlin_query.hits.0.hit._id: "doc1" } @@ -155,9 +155,9 @@ setup: - gt: {details.amsterdam_query.quality_level: 0.333} - lt: {details.amsterdam_query.quality_level: 0.334} - match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}} - - match: {details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc2"}, + - match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"}, {"_index": "foo", "_id": "doc3"} ]} - match: {details.berlin_query.quality_level: 0.5} - match: {details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}} - - match: {details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc1"}]} + - match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]} diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml index fc5e6576ad4..baf10f1542c 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/20_dcg.yml @@ -73,7 +73,7 @@ - lt: {quality_level: 13.848264 } - gt: {details.dcg_query.quality_level: 13.848263} - lt: {details.dcg_query.quality_level: 13.848264} - - match: {details.dcg_query.unknown_docs: [ ]} + - match: {details.dcg_query.unrated_docs: [ ]} # reverse the order in which the results are returned (less relevant docs first) @@ -100,7 +100,7 @@ - lt: {quality_level: 10.299675} - gt: 
{details.dcg_query_reverse.quality_level: 10.299674} - lt: {details.dcg_query_reverse.quality_level: 10.299675} - - match: {details.dcg_query_reverse.unknown_docs: [ ]} + - match: {details.dcg_query_reverse.unrated_docs: [ ]} # if we mix both, we should get the average @@ -138,7 +138,7 @@ - lt: {quality_level: 12.073970} - gt: {details.dcg_query.quality_level: 13.848263} - lt: {details.dcg_query.quality_level: 13.848264} - - match: {details.dcg_query.unknown_docs: [ ]} + - match: {details.dcg_query.unrated_docs: [ ]} - gt: {details.dcg_query_reverse.quality_level: 10.299674} - lt: {details.dcg_query_reverse.quality_level: 10.299675} - - match: {details.dcg_query_reverse.unknown_docs: [ ]} + - match: {details.dcg_query_reverse.unrated_docs: [ ]} diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml index 4008f677185..d6119ad3a9e 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/30_failures.yml @@ -36,7 +36,7 @@ - match: { quality_level: 1} - match: { details.amsterdam_query.quality_level: 1.0} - - match: { details.amsterdam_query.unknown_docs: [ ]} + - match: { details.amsterdam_query.unrated_docs: [ ]} - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}} - is_true: failures.invalid_query diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml index f0c564d3639..5e0082d213c 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml @@ -85,7 +85,7 @@ setup: } - match: {quality_level: 
0.9} - - match: {details.amsterdam_query.unknown_docs.0._id: "6"} + - match: {details.amsterdam_query.unrated_docs.0._id: "6"} --- "Test illegal request parts": diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8f09afbb17c..bf0adc6e142 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -57,7 +57,6 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler DEFAULT_CITY_PROPERTIES = EnumSet.of( - Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, - Property.CITY_NAME, Property.LOCATION + Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE, + Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION ); static final Set DEFAULT_COUNTRY_PROPERTIES = EnumSet.of( Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE @@ -377,6 +387,7 @@ public final class GeoIpProcessor extends AbstractProcessor { COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, + REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, @@ -386,7 +397,8 @@ public final class GeoIpProcessor extends AbstractProcessor { static final EnumSet ALL_CITY_PROPERTIES = EnumSet.of( Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_NAME, - Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION + Property.REGION_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, + Property.LOCATION ); static final EnumSet ALL_COUNTRY_PROPERTIES = EnumSet.of( Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java 
b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 0aa2eb9fdfa..7a5d6f5808f 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -284,7 +284,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config1.put("properties", Collections.singletonList("invalid")); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config1)); assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + - "COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]")); + "COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]")); Map config2 = new HashMap<>(); config2.put("field", "_field"); diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 48a1769cbf8..4c04d4e340a 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -117,11 +117,12 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address)); @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData.size(), equalTo(8)); + assertThat(geoData.size(), equalTo(9)); assertThat(geoData.get("ip"), equalTo(address)); assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); assertThat(geoData.get("continent_name"), equalTo("North America")); 
+ assertThat(geoData.get("region_iso_code"), equalTo("US-FL")); assertThat(geoData.get("region_name"), equalTo("Florida")); assertThat(geoData.get("city_name"), equalTo("Hollywood")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml index 0c400c3c0ea..012ca717318 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml @@ -30,11 +30,12 @@ type: test id: 1 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 5 } + - length: { _source.geoip: 6 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.location.lon: -93.2166 } - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -54,7 +55,7 @@ { "geoip" : { "field" : "field1", - "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"] + "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_iso_code", "region_name", "continent_name"] } } ] @@ -75,7 +76,7 @@ type: test id: 1 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 8 } + - length: { _source.geoip: 9 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.ip: "128.101.101.101" } @@ -83,6 +84,7 @@ - match: { _source.geoip.location.lat: 44.9759 } - match: { _source.geoip.timezone: "America/Chicago" } - match: { 
_source.geoip.country_name: "United States" } + - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } @@ -188,11 +190,12 @@ type: test id: 2 - match: { _source.field1: "128.101.101.101" } - - length: { _source.geoip: 5 } + - length: { _source.geoip: 6 } - match: { _source.geoip.city_name: "Minneapolis" } - match: { _source.geoip.country_iso_code: "US" } - match: { _source.geoip.location.lon: -93.2166 } - match: { _source.geoip.location.lat: 44.9759 } + - match: { _source.geoip.region_iso_code: "US-MN" } - match: { _source.geoip.region_name: "Minnesota" } - match: { _source.geoip.continent_name: "North America" } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java index 5bda7e1b83d..0a09b6b8789 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java @@ -32,6 +32,7 @@ import io.netty.handler.codec.http.HttpResponseDecoder; import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpUtil; import io.netty.handler.codec.http.HttpVersion; + import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -89,7 +90,6 @@ public class HttpReadWriteHandlerTests extends ESTestCase { private final ResponseDecoder responseDecoder = new ResponseDecoder(); @Before - @SuppressWarnings("unchecked") public void setMocks() { transport = mock(NioHttpServerTransport.class); Settings settings = Settings.EMPTY; diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java 
b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java index 090fc579c48..9322bfd7122 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java @@ -95,7 +95,6 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase { @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 081a1918674..0b936e44e5b 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -19,9 +19,6 @@ package org.elasticsearch.upgrades; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; import org.elasticsearch.client.Request; @@ -34,7 +31,6 @@ import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -45,7 +41,6 @@ 
import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.Base64; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -142,8 +137,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); count = randomIntBetween(2000, 3000); byte[] randomByteArray = new byte[16]; @@ -164,16 +160,7 @@ public class FullClusterRestartIT extends ESRestTestCase { count = countOfIndexedRandomDocuments(); } - Map params = new HashMap<>(); - params.put("timeout", "2m"); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - params.put("wait_for_events", "languid"); - Map healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params)); - logger.info("health api response: {}", healthRsp); - assertEquals("green", healthRsp.get("status")); - assertFalse((Boolean) healthRsp.get("timed_out")); - + ensureGreenLongWait(index); assertBasicSearchWorks(count); assertAllSearchWorks(count); assertBasicAggregationWorks(); @@ -205,8 +192,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); int numDocs = randomIntBetween(2000, 3000); 
indexRandomDocuments(numDocs, true, false, i -> { @@ -215,33 +203,26 @@ public class FullClusterRestartIT extends ESRestTestCase { .endObject(); }); logger.info("Refreshing [{}]", index); - client().performRequest("POST", "/" + index + "/_refresh"); + client().performRequest(new Request("POST", "/" + index + "/_refresh")); } else { final int numReplicas = 1; final long startTime = System.currentTimeMillis(); logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index); - String requestBody = "{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}"; - Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings"); + setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}"); + Response response = client().performRequest(setNumberOfReplicas); - Map params = new HashMap<>(); - params.put("timeout", "2m"); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - params.put("wait_for_events", "languid"); - Map healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params)); - assertEquals("green", healthRsp.get("status")); - assertFalse((Boolean) healthRsp.get("timed_out")); + ensureGreenLongWait(index); logger.debug("--> index [{}] is green, took [{}] ms", index, (System.currentTimeMillis() - startTime)); - Map recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery")); + Map recoverRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_recovery"))); logger.debug("--> recovery status:\n{}", recoverRsp); Set counts = new HashSet<>(); for (String node : dataNodes(index, client())) { - Map responseBody = toMap(client().performRequest("GET", "/" + index + 
"/_search", - Collections.singletonMap("preference", "_only_nodes:" + node))); + Request search = new Request("GET", "/" + index + "/_search"); + search.addParameter("preference", "_only_nodes:" + node); + Map responseBody = entityAsMap(client().performRequest(search)); assertNoFailures(responseBody); int hits = (int) XContentMapValues.extractValue("hits.total", responseBody); counts.add(hits); @@ -282,12 +263,13 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); String aliasName = "%23" + index; // %23 == # - client().performRequest("PUT", "/" + index + "/_alias/" + aliasName); - Response response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName); + client().performRequest(new Request("PUT", "/" + index + "/_alias/" + aliasName)); + Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName)); assertEquals(200, response.getStatusLine().getStatusCode()); count = randomIntBetween(32, 128); @@ -301,19 +283,20 @@ public class FullClusterRestartIT extends ESRestTestCase { count = countOfIndexedRandomDocuments(); } - logger.error("clusterState=" + toMap(client().performRequest("GET", "/_cluster/state", - Collections.singletonMap("metric", "metadata")))); + Request request = new Request("GET", "/_cluster/state"); + request.addParameter("metric", "metadata"); + logger.error("clusterState=" + entityAsMap(client().performRequest(request))); // We can read from the alias just like we can read from the index. 
String aliasName = "%23" + index; // %23 == # - Map searchRsp = toMap(client().performRequest("GET", "/" + aliasName + "/_search")); + Map searchRsp = entityAsMap(client().performRequest(new Request("GET", "/" + aliasName + "/_search"))); int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); assertEquals(count, totalHits); if (runningAgainstOldCluster == false) { // We can remove the alias. - Response response = client().performRequest("DELETE", "/" + index + "/_alias/" + aliasName); + Response response = client().performRequest(new Request("DELETE", "/" + index + "/_alias/" + aliasName)); assertEquals(200, response.getStatusLine().getStatusCode()); // and check that it is gone: - response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName); + response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName)); assertEquals(404, response.getStatusLine().getStatusCode()); } } @@ -330,13 +313,14 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); - client().performRequest("PUT", "/" + index); + Request createTemplate = new Request("PUT", "/_template/template_1"); + createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createTemplate); + client().performRequest(new Request("PUT", "/" + index)); } // verifying if we can still read some properties from cluster state api: - Map clusterState = toMap(client().performRequest("GET", "/_cluster/state")); + Map clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state"))); // Check some global properties: String clusterName = (String) clusterState.get("cluster_name"); @@ -381,8 +365,9 @@ public class FullClusterRestartIT extends ESRestTestCase 
{ mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); indexRandomDocuments(numDocs, true, true, i -> { @@ -393,23 +378,20 @@ public class FullClusterRestartIT extends ESRestTestCase { ensureGreen(index); // wait for source index to be available on both nodes before starting shrink - String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}"; - Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings"); + updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}"); + client().performRequest(updateSettingsRequest); - String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}"; - rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(), - new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex); + shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}"); + client().performRequest(shrinkIndexRequest); - rsp = client().performRequest("POST", "/_refresh"); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + client().performRequest(new Request("POST", "/_refresh")); } else { numDocs = countOfIndexedRandomDocuments(); } - Map response 
= toMap(client().performRequest("GET", "/" + index + "/_search")); + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); assertNoFailures(response); int totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertThat(totalShards, greaterThan(1)); @@ -418,7 +400,7 @@ public class FullClusterRestartIT extends ESRestTestCase { int totalHits = (int) XContentMapValues.extractValue("hits.total", response); assertEquals(numDocs, totalHits); - response = toMap(client().performRequest("GET", "/" + shrunkenIndex+ "/_search")); + response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex+ "/_search"))); assertNoFailures(response); totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertEquals(1, totalShards); @@ -448,8 +430,9 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); numDocs = randomIntBetween(512, 1024); indexRandomDocuments(numDocs, true, true, i -> { @@ -460,23 +443,20 @@ public class FullClusterRestartIT extends ESRestTestCase { } else { ensureGreen(index); // wait for source index to be available on both nodes before starting shrink - String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}"; - Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings"); + 
updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}"); + client().performRequest(updateSettingsRequest); - String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}"; - rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(), - new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex); + shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}"); + client().performRequest(shrinkIndexRequest); numDocs = countOfIndexedRandomDocuments(); } - Response rsp = client().performRequest("POST", "/_refresh"); - assertEquals(200, rsp.getStatusLine().getStatusCode()); + client().performRequest(new Request("POST", "/_refresh")); - Map response = toMap(client().performRequest("GET", "/" + index + "/_search")); + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); assertNoFailures(response); int totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertThat(totalShards, greaterThan(1)); @@ -486,7 +466,7 @@ public class FullClusterRestartIT extends ESRestTestCase { assertEquals(numDocs, totalHits); if (runningAgainstOldCluster == false) { - response = toMap(client().performRequest("GET", "/" + shrunkenIndex + "/_search")); + response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex + "/_search"))); assertNoFailures(response); totalShards = (int) XContentMapValues.extractValue("_shards.total", response); assertEquals(1, totalShards); @@ -499,43 +479,48 @@ public class FullClusterRestartIT extends ESRestTestCase { void assertBasicSearchWorks(int count) throws IOException { logger.info("--> testing basic search"); - Map response = toMap(client().performRequest("GET", "/" + index + "/_search")); - 
assertNoFailures(response); - int numDocs = (int) XContentMapValues.extractValue("hits.total", response); - logger.info("Found {} in old index", numDocs); - assertEquals(count, numDocs); + { + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); + assertNoFailures(response); + int numDocs = (int) XContentMapValues.extractValue("hits.total", response); + logger.info("Found {} in old index", numDocs); + assertEquals(count, numDocs); + } logger.info("--> testing basic search with sort"); - String searchRequestBody = "{ \"sort\": [{ \"int\" : \"asc\" }]}"; - response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(response); - numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertEquals(count, numDocs); + { + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"sort\": [{ \"int\" : \"asc\" }]}"); + Map response = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(response); + assertTotalHits(count, response); + } logger.info("--> testing exists filter"); - searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}"; - response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(response); - numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertEquals(count, numDocs); + { + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"string\"} }}"); + Map response = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(response); + assertTotalHits(count, response); + } - searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": 
\"field.with.dots\"} }}"; - response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(response); - numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertEquals(count, numDocs); + logger.info("--> testing field with dots in the name"); + { + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}"); + Map response = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(response); + assertTotalHits(count, response); + } } void assertAllSearchWorks(int count) throws IOException { logger.info("--> testing _all search"); - Map searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search")); - assertNoFailures(searchRsp); - int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(count, totalHits); - Map bestHit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0); + Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search"))); + assertNoFailures(response); + assertTotalHits(count, response); + Map bestHit = (Map) ((List) (XContentMapValues.extractValue("hits.hits", response))).get(0); // Make sure there are payloads and they are taken into account for the score // the 'string' field has a boost of 4 in the mappings so it should get a payload boost @@ -543,82 +528,77 @@ public class FullClusterRestartIT extends ESRestTestCase { assertNotNull(stringValue); String type = (String) bestHit.get("_type"); String id = (String) bestHit.get("_id"); - String requestBody = "{ \"query\": { \"match_all\" : {} }}"; - String explanation = toStr(client().performRequest("GET", "/" + index + "/" + type + "/" + id, - Collections.emptyMap(), new StringEntity(requestBody, 
ContentType.APPLICATION_JSON))); + Request explanationRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain"); + explanationRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}"); + String explanation = toStr(client().performRequest(explanationRequest)); assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost")); // Make sure the query can run on the whole index - searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.singletonMap("explain", "true"), new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(searchRsp); - totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(count, totalHits); + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setEntity(explanationRequest.getEntity()); + searchRequest.addParameter("explain", "true"); + Map matchAllResponse = entityAsMap(client().performRequest(searchRequest)); + assertNoFailures(matchAllResponse); + assertTotalHits(count, matchAllResponse); } void assertBasicAggregationWorks() throws IOException { // histogram on a long - String requestBody = "{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}"; - Map searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - assertNoFailures(searchRsp); - List histoBuckets = (List) XContentMapValues.extractValue("aggregations.histo.buckets", searchRsp); - long totalCount = 0; + Request longHistogramRequest = new Request("GET", "/" + index + "/_search"); + longHistogramRequest.setJsonEntity("{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}"); + Map longHistogram = entityAsMap(client().performRequest(longHistogramRequest)); + assertNoFailures(longHistogram); + List histoBuckets = (List) 
XContentMapValues.extractValue("aggregations.histo.buckets", longHistogram); + int histoCount = 0; for (Object entry : histoBuckets) { Map bucket = (Map) entry; - totalCount += (Integer) bucket.get("doc_count"); + histoCount += (Integer) bucket.get("doc_count"); } - int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(totalHits, totalCount); + assertTotalHits(histoCount, longHistogram); // terms on a boolean - requestBody = "{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}"; - searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - List termsBuckets = (List) XContentMapValues.extractValue("aggregations.bool_terms.buckets", searchRsp); - totalCount = 0; + Request boolTermsRequest = new Request("GET", "/" + index + "/_search"); + boolTermsRequest.setJsonEntity("{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}"); + Map boolTerms = entityAsMap(client().performRequest(boolTermsRequest)); + List termsBuckets = (List) XContentMapValues.extractValue("aggregations.bool_terms.buckets", boolTerms); + int termsCount = 0; for (Object entry : termsBuckets) { Map bucket = (Map) entry; - totalCount += (Integer) bucket.get("doc_count"); + termsCount += (Integer) bucket.get("doc_count"); } - totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); - assertEquals(totalHits, totalCount); + assertTotalHits(termsCount, boolTerms); } void assertRealtimeGetWorks() throws IOException { - String requestBody = "{ \"index\": { \"refresh_interval\" : -1 }}"; - Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings"); + 
disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}"); + client().performRequest(disableAutoRefresh); - requestBody = "{ \"query\": { \"match_all\" : {} }}"; - Map searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON))); - Map hit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0); + Request searchRequest = new Request("GET", "/" + index + "/_search"); + searchRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}"); + Map searchResponse = entityAsMap(client().performRequest(searchRequest)); + Map hit = (Map) ((List)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0); String docId = (String) hit.get("_id"); - requestBody = "{ \"doc\" : { \"foo\": \"bar\"}}"; - response = client().performRequest("POST", "/" + index + "/doc/" + docId + "/_update", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update"); + updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}"); + client().performRequest(updateRequest); - Map getRsp = toMap(client().performRequest("GET", "/" + index + "/doc/" + docId)); + Map getRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/doc/" + docId))); Map source = (Map) getRsp.get("_source"); assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo")); - requestBody = "{ \"index\": { \"refresh_interval\" : \"1s\" }}"; - response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(), - new StringEntity(requestBody, ContentType.APPLICATION_JSON)); - assertEquals(200, response.getStatusLine().getStatusCode()); + Request enableAutoRefresh = new Request("PUT", "/" + index + "/_settings"); + 
enableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : \"1s\" }}"); + client().performRequest(enableAutoRefresh); } void assertStoredBinaryFields(int count) throws Exception { - String requestBody = "{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}"; - Map rsp = toMap(client().performRequest("GET", "/" + index + "/_search", - Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON))); + Request request = new Request("GET", "/" + index + "/_search"); + request.setJsonEntity("{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}"); + Map rsp = entityAsMap(client().performRequest(request)); - int totalCount = (Integer) XContentMapValues.extractValue("hits.total", rsp); - assertEquals(count, totalCount); + assertTotalHits(count, rsp); List hits = (List) XContentMapValues.extractValue("hits.hits", rsp); assertEquals(100, hits.size()); for (Object hit : hits) { @@ -631,14 +611,6 @@ public class FullClusterRestartIT extends ESRestTestCase { } } - static Map toMap(Response response) throws IOException { - return toMap(EntityUtils.toString(response.getEntity())); - } - - static Map toMap(String response) throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); - } - static String toStr(Response response) throws IOException { return EntityUtils.toString(response.getEntity()); } @@ -648,6 +620,11 @@ public class FullClusterRestartIT extends ESRestTestCase { assertEquals(0, failed); } + static void assertTotalHits(int expectedTotalHits, Map response) { + int actualTotalHits = (Integer) XContentMapValues.extractValue("hits.total", response); + assertEquals(expectedTotalHits, actualTotalHits); + } + /** * Tests that a single document survives. Super basic smoke test. 
*/ @@ -656,11 +633,12 @@ public class FullClusterRestartIT extends ESRestTestCase { String doc = "{\"test\": \"test\"}"; if (runningAgainstOldCluster) { - client().performRequest("PUT", docLocation, singletonMap("refresh", "true"), - new StringEntity(doc, ContentType.APPLICATION_JSON)); + Request createDoc = new Request("PUT", docLocation); + createDoc.setJsonEntity(doc); + client().performRequest(createDoc); } - assertThat(toStr(client().performRequest("GET", docLocation)), containsString(doc)); + assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc)); } /** @@ -733,16 +711,18 @@ public class FullClusterRestartIT extends ESRestTestCase { } // Count the documents in the index to make sure we have as many as we put there - String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0"))); + Request countRequest = new Request("GET", "/" + index + "/_search"); + countRequest.addParameter("size", "0"); + String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); if (false == runningAgainstOldCluster) { boolean restoredFromTranslog = false; boolean foundPrimary = false; - Map params = new HashMap<>(); - params.put("h", "index,shard,type,stage,translog_ops_recovered"); - params.put("s", "index,shard,type"); - String recoveryResponse = toStr(client().performRequest("GET", "/_cat/recovery/" + index, params)); + Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index); + recoveryRequest.addParameter("h", "index,shard,type,stage,translog_ops_recovered"); + recoveryRequest.addParameter("s", "index,shard,type"); + String recoveryResponse = toStr(client().performRequest(recoveryRequest)); for (String line : recoveryResponse.split("\n")) { // Find the primaries foundPrimary = true; @@ -768,11 +748,10 @@ public class FullClusterRestartIT extends ESRestTestCase { if (shouldHaveTranslog && false == 
currentLuceneVersion.equals(bwcLuceneVersion)) { int numCurrentVersion = 0; int numBwcVersion = 0; - params.clear(); - params.put("h", "prirep,shard,index,version"); - params.put("s", "prirep,shard,index"); - String segmentsResponse = toStr( - client().performRequest("GET", "/_cat/segments/" + index, params)); + Request segmentsRequest = new Request("GET", "/_cat/segments/" + index); + segmentsRequest.addParameter("h", "prirep,shard,index,version"); + segmentsRequest.addParameter("s", "prirep,shard,index"); + String segmentsResponse = toStr(client().performRequest(segmentsRequest)); for (String line : segmentsResponse.split("\n")) { if (false == line.startsWith("p")) { continue; @@ -817,14 +796,16 @@ public class FullClusterRestartIT extends ESRestTestCase { refresh(); // Count the documents in the index to make sure we have as many as we put there - String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0"))); + Request countRequest = new Request("GET", "/" + index + "/_search"); + countRequest.addParameter("size", "0"); + String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); // Stick a routing attribute into to cluster settings so we can see it after the restore - HttpEntity routingSetting = new StringEntity( - "{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}", - ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/_cluster/settings", emptyMap(), routingSetting); + Request addRoutingSettings = new Request("PUT", "/_cluster/settings"); + addRoutingSettings.setJsonEntity( + "{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}"); + client().performRequest(addRoutingSettings); // Stick a template into the cluster so we can see it after the restore XContentBuilder templateBuilder = JsonXContent.contentBuilder().startObject(); 
@@ -857,8 +838,9 @@ public class FullClusterRestartIT extends ESRestTestCase { templateBuilder.endObject(); } templateBuilder.endObject().endObject(); - client().performRequest("PUT", "/_template/test_template", emptyMap(), - new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON)); + Request createTemplateRequest = new Request("PUT", "/_template/test_template"); + createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder)); + client().performRequest(createTemplateRequest); if (runningAgainstOldCluster) { // Create the repo @@ -871,13 +853,15 @@ public class FullClusterRestartIT extends ESRestTestCase { repoConfig.endObject(); } repoConfig.endObject(); - client().performRequest("PUT", "/_snapshot/repo", emptyMap(), - new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON)); + Request createRepoRequest = new Request("PUT", "/_snapshot/repo"); + createRepoRequest.setJsonEntity(Strings.toString(repoConfig)); + client().performRequest(createRepoRequest); } - client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"), - singletonMap("wait_for_completion", "true"), - new StringEntity("{\"indices\": \"" + index + "\"}", ContentType.APPLICATION_JSON)); + Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? 
"old_snap" : "new_snap")); + createSnapshot.addParameter("wait_for_completion", "true"); + createSnapshot.setJsonEntity("{\"indices\": \"" + index + "\"}"); + client().performRequest(createSnapshot); checkSnapshot("old_snap", count, oldClusterVersion); if (false == runningAgainstOldCluster) { @@ -896,10 +880,13 @@ public class FullClusterRestartIT extends ESRestTestCase { mappingsAndSettings.endObject(); } mappingsAndSettings.endObject(); - client().performRequest("PUT", "/" + index, Collections.emptyMap(), - new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON)); + Request createIndex = new Request("PUT", "/" + index); + createIndex.setJsonEntity(Strings.toString(mappingsAndSettings)); + client().performRequest(createIndex); } else { - Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards")); + Request statsRequest = new Request("GET", index + "/_stats"); + statsRequest.addParameter("level", "shards"); + Response response = client().performRequest(statsRequest); List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." 
+ index + ".shards.0"); String globalHistoryUUID = null; for (Object shard : shardStats) { @@ -920,18 +907,20 @@ public class FullClusterRestartIT extends ESRestTestCase { private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException { // Check the snapshot metadata, especially the version - String response = toStr(client().performRequest("GET", "/_snapshot/repo/" + snapshotName, listSnapshotVerboseParams())); - Map map = toMap(response); - assertEquals(response, singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", map)); - assertEquals(response, singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", map)); - assertEquals(response, singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", map)); + Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName); + if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) { + listSnapshotRequest.addParameter("verbose", "true"); + } + Map listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest)); + assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse)); + assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse)); + assertEquals(singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", listSnapshotResponse)); // Remove the routing setting and template so we can test restoring them. 
- HttpEntity clearRoutingSetting = new StringEntity( - "{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}", - ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/_cluster/settings", emptyMap(), clearRoutingSetting); - client().performRequest("DELETE", "/_template/test_template", emptyMap(), clearRoutingSetting); + Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings"); + clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}"); + client().performRequest(clearRoutingFromSettings); + client().performRequest(new Request("DELETE", "/_template/test_template")); // Restore XContentBuilder restoreCommand = JsonXContent.contentBuilder().startObject(); @@ -940,11 +929,15 @@ public class FullClusterRestartIT extends ESRestTestCase { restoreCommand.field("rename_pattern", index); restoreCommand.field("rename_replacement", "restored_" + index); restoreCommand.endObject(); - client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"), - new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON)); + Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore"); + restoreRequest.addParameter("wait_for_completion", "true"); + restoreRequest.setJsonEntity(Strings.toString(restoreCommand)); + client().performRequest(restoreRequest); // Make sure search finds all documents - String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); + Request countRequest = new Request("GET", "/restored_" + index + "/_search"); + countRequest.addParameter("size", "0"); + String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); // Add some extra documents to the index to be sure we can still write to it after restoring it @@ 
-954,61 +947,56 @@ public class FullClusterRestartIT extends ESRestTestCase { bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n"); bulk.append("{\"test\":\"test\"}\n"); } - client().performRequest("POST", "/restored_" + index + "/doc/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk"); + writeToRestoredRequest.addParameter("refresh", "true"); + writeToRestoredRequest.setJsonEntity(bulk.toString()); + client().performRequest(writeToRestoredRequest); // And count to make sure the add worked // Make sure search finds all documents - countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); - assertThat(countResponse, containsString("\"total\":" + (count + extras))); + Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search"); + countAfterWriteRequest.addParameter("size", "0"); + String countAfterWriteResponse = toStr(client().performRequest(countAfterWriteRequest)); + assertThat(countAfterWriteResponse, containsString("\"total\":" + (count + extras))); // Clean up the index for the next iteration - client().performRequest("DELETE", "/restored_*"); + client().performRequest(new Request("DELETE", "/restored_*")); // Check settings added by the restore process - map = toMap(client().performRequest("GET", "/_cluster/settings", singletonMap("flat_settings", "true"))); - Map expected = new HashMap<>(); - expected.put("transient", emptyMap()); - expected.put("persistent", singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString())); - if (expected.equals(map) == false) { + Request clusterSettingsRequest = new Request("GET", "/_cluster/settings"); + clusterSettingsRequest.addParameter("flat_settings", "true"); + Map clusterSettingsResponse = 
entityAsMap(client().performRequest(clusterSettingsRequest)); + Map expectedClusterSettings = new HashMap<>(); + expectedClusterSettings.put("transient", emptyMap()); + expectedClusterSettings.put("persistent", + singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString())); + if (expectedClusterSettings.equals(clusterSettingsResponse) == false) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); - builder.compareMaps(map, expected); + builder.compareMaps(clusterSettingsResponse, expectedClusterSettings); fail("settings don't match:\n" + builder.toString()); } // Check that the template was restored successfully - map = toMap(client().performRequest("GET", "/_template/test_template")); - expected = new HashMap<>(); + Map getTemplateResponse = entityAsMap(client().performRequest(new Request("GET", "/_template/test_template"))); + Map expectedTemplate = new HashMap<>(); if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_0_0_beta1)) { - expected.put("template", "evil_*"); + expectedTemplate.put("template", "evil_*"); } else { - expected.put("index_patterns", singletonList("evil_*")); + expectedTemplate.put("index_patterns", singletonList("evil_*")); } - expected.put("settings", singletonMap("index", singletonMap("number_of_shards", "1"))); - expected.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true)))); - expected.put("order", 0); + expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1"))); + expectedTemplate.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true)))); + expectedTemplate.put("order", 0); Map aliases = new HashMap<>(); aliases.put("alias1", emptyMap()); aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString())))); - expected.put("aliases", aliases); - expected = singletonMap("test_template", expected); - if (false == 
expected.equals(map)) { + expectedTemplate.put("aliases", aliases); + expectedTemplate = singletonMap("test_template", expectedTemplate); + if (false == expectedTemplate.equals(getTemplateResponse)) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); - builder.compareMaps(map, expected); + builder.compareMaps(getTemplateResponse, expectedTemplate); fail("template doesn't match:\n" + builder.toString()); } - - } - - /** - * Parameters required to get the version of Elasticsearch that took the snapshot. - * On versions after 5.5 we need a {@code verbose} parameter. - */ - private Map listSnapshotVerboseParams() { - if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0)) { - return emptyMap(); - } - return singletonMap("verbose", "true"); } // TODO tests for upgrades after shrink. We've had trouble with shrink in the past. @@ -1018,14 +1006,15 @@ public class FullClusterRestartIT extends ESRestTestCase { logger.info("Indexing {} random documents", count); for (int i = 0; i < count; i++) { logger.debug("Indexing document [{}]", i); - client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(), - new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON)); + Request createDocument = new Request("POST", "/" + index + "/doc/" + i); + createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i))); + client().performRequest(createDocument); if (rarely()) { refresh(); } if (flushAllowed && rarely()) { logger.debug("Flushing [{}]", index); - client().performRequest("POST", "/" + index + "/_flush"); + client().performRequest(new Request("POST", "/" + index + "/_flush")); } } if (saveInfo) { @@ -1042,13 +1031,16 @@ public class FullClusterRestartIT extends ESRestTestCase { infoDoc.field("value", value); infoDoc.endObject(); // Only create the first version so we know how many documents are created when the index is first created - Map params = singletonMap("op_type", "create"); - 
client().performRequest("PUT", "/info/doc/" + index + "_" + type, params, - new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/info/doc/" + index + "_" + type); + request.addParameter("op_type", "create"); + request.setJsonEntity(Strings.toString(infoDoc)); + client().performRequest(request); } private String loadInfoDocument(String type) throws IOException { - String doc = toStr(client().performRequest("GET", "/info/doc/" + index + "_" + type, singletonMap("filter_path", "_source"))); + Request request = new Request("GET", "/info/doc/" + index + "_" + type); + request.addParameter("filter_path", "_source"); + String doc = toStr(client().performRequest(request)); Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc); assertTrue(doc, m.find()); return m.group(1); @@ -1060,11 +1052,13 @@ public class FullClusterRestartIT extends ESRestTestCase { private void refresh() throws IOException { logger.debug("Refreshing [{}]", index); - client().performRequest("POST", "/" + index + "/_refresh"); + client().performRequest(new Request("POST", "/" + index + "/_refresh")); } private List dataNodes(String index, RestClient client) throws IOException { - Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards")); + Request request = new Request("GET", index + "/_stats"); + request.addParameter("level", "shards"); + Response response = client.performRequest(request); List nodes = new ArrayList<>(); List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0"); for (Object shard : shardStats) { @@ -1073,4 +1067,21 @@ public class FullClusterRestartIT extends ESRestTestCase { } return nodes; } + + /** + * Wait for an index to have green health, waiting longer than + * {@link ESRestTestCase#ensureGreen}. 
+ */ + protected void ensureGreenLongWait(String index) throws IOException { + Request request = new Request("GET", "/_cluster/health/" + index); + request.addParameter("timeout", "2m"); + request.addParameter("wait_for_status", "green"); + request.addParameter("wait_for_no_relocating_shards", "true"); + request.addParameter("wait_for_events", "languid"); + request.addParameter("level", "shards"); + Map healthRsp = entityAsMap(client().performRequest(request)); + logger.info("health api response: {}", healthRsp); + assertEquals("green", healthRsp.get("status")); + assertFalse((Boolean) healthRsp.get("timed_out")); + } } diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 809cd40d698..062016909b6 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.upgrades; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Request; @@ -32,14 +30,12 @@ import org.elasticsearch.test.rest.yaml.ObjectPath; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.Future; import java.util.function.Predicate; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; -import static java.util.Collections.emptyMap; import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING; import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING; import static 
org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY; @@ -51,6 +47,8 @@ import static org.hamcrest.Matchers.notNullValue; * In depth testing of the recovery mechanism during a rolling restart. */ public class RecoveryIT extends AbstractRollingTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31291") public void testHistoryUUIDIsGenerated() throws Exception { final String index = "index_history_uuid"; if (CLUSTER_TYPE == ClusterType.OLD) { @@ -65,8 +63,9 @@ public class RecoveryIT extends AbstractRollingTestCase { createIndex(index, settings.build()); } else if (CLUSTER_TYPE == ClusterType.UPGRADED) { ensureGreen(index); - Response response = client().performRequest("GET", index + "/_stats", Collections.singletonMap("level", "shards")); - assertOK(response); + Request shardStatsRequest = new Request("GET", index + "/_stats"); + shardStatsRequest.addParameter("level", "shards"); + Response response = client().performRequest(shardStatsRequest); ObjectPath objectPath = ObjectPath.createFromResponse(response); List shardStats = objectPath.evaluate("indices." 
+ index + ".shards.0"); assertThat(shardStats, hasSize(2)); @@ -87,8 +86,9 @@ public class RecoveryIT extends AbstractRollingTestCase { private int indexDocs(String index, final int idStart, final int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { final int id = idStart + i; - assertOK(client().performRequest("PUT", index + "/test/" + id, emptyMap(), - new StringEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}", ContentType.APPLICATION_JSON))); + Request indexDoc = new Request("PUT", index + "/test/" + id); + indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}"); + client().performRequest(indexDoc); } return numDocs; } @@ -113,7 +113,7 @@ public class RecoveryIT extends AbstractRollingTestCase { public void testRecoveryWithConcurrentIndexing() throws Exception { final String index = "recovery_with_concurrent_indexing"; - Response response = client().performRequest("GET", "_nodes"); + Response response = client().performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); final Map nodeMap = objectPath.evaluate("nodes"); List nodes = new ArrayList<>(nodeMap.keySet()); @@ -139,7 +139,7 @@ public class RecoveryIT extends AbstractRollingTestCase { updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null)); asyncIndexDocs(index, 10, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + "/_refresh")); + client().performRequest(new Request("POST", index + "/_refresh")); assertCount(index, "_only_nodes:" + nodes.get(0), 60); assertCount(index, "_only_nodes:" + nodes.get(1), 60); assertCount(index, "_only_nodes:" + nodes.get(2), 60); @@ -150,7 +150,7 @@ public class RecoveryIT extends AbstractRollingTestCase { updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null)); asyncIndexDocs(index, 60, 50).get(); ensureGreen(index); - 
assertOK(client().performRequest("POST", index + "/_refresh")); + client().performRequest(new Request("POST", index + "/_refresh")); assertCount(index, "_only_nodes:" + nodes.get(0), 110); assertCount(index, "_only_nodes:" + nodes.get(1), 110); assertCount(index, "_only_nodes:" + nodes.get(2), 110); @@ -161,15 +161,16 @@ public class RecoveryIT extends AbstractRollingTestCase { } private void assertCount(final String index, final String preference, final int expectedCount) throws IOException { - final Response response = client().performRequest("GET", index + "/_count", Collections.singletonMap("preference", preference)); - assertOK(response); + final Request request = new Request("GET", index + "/_count"); + request.addParameter("preference", preference); + final Response response = client().performRequest(request); final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString()); assertThat(actualCount, equalTo(expectedCount)); } private String getNodeId(Predicate versionPredicate) throws IOException { - Response response = client().performRequest("GET", "_nodes"); + Response response = client().performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); Map nodesAsMap = objectPath.evaluate("nodes"); for (String id : nodesAsMap.keySet()) { @@ -216,7 +217,7 @@ public class RecoveryIT extends AbstractRollingTestCase { updateIndexSettings(index, Settings.builder().put("index.routing.allocation.include._id", newNode)); asyncIndexDocs(index, 10, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + "/_refresh")); + client().performRequest(new Request("POST", index + "/_refresh")); assertCount(index, "_only_nodes:" + newNode, 60); break; case UPGRADED: @@ -226,8 +227,8 @@ public class RecoveryIT extends AbstractRollingTestCase { ); asyncIndexDocs(index, 60, 50).get(); ensureGreen(index); - assertOK(client().performRequest("POST", index + 
"/_refresh")); - Response response = client().performRequest("GET", "_nodes"); + client().performRequest(new Request("POST", index + "/_refresh")); + Response response = client().performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); final Map nodeMap = objectPath.evaluate("nodes"); List nodes = new ArrayList<>(nodeMap.keySet()); diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index 3aada7837d8..20561115542 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -57,6 +57,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.isEmptyString; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeThat; import static org.junit.Assume.assumeTrue; @@ -302,5 +303,26 @@ public abstract class ArchiveTestCase extends PackagingTestCase { } } + public void test90SecurityCliPackaging() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + if (distribution().equals(Distribution.DEFAULT_TAR) || distribution().equals(Distribution.DEFAULT_ZIP)) { + assertTrue(Files.exists(installation.lib.resolve("tools").resolve("security-cli"))); + Platforms.onLinux(() -> { + final Result result = sh.run(bin.elasticsearchCertutil + " help"); + assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack")); + }); + + Platforms.onWindows(() -> { + final Result result = sh.run(bin.elasticsearchCertutil + " help"); + assertThat(result.stdout, 
containsString("Simplifies certificate creation for use with the Elastic Stack")); + }); + } else if (distribution().equals(Distribution.OSS_TAR) || distribution().equals(Distribution.OSS_ZIP)) { + assertFalse(Files.exists(installation.lib.resolve("tools").resolve("security-cli"))); + } + } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index 40dc546f230..8bc3fc6e14d 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -101,6 +101,7 @@ public class Installation { public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin"); public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore"); public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog"); + public final Path elasticsearchCertutil = platformExecutable("elasticsearch-certutil"); private Path platformExecutable(String name) { final String platformExecutableName = Platforms.WINDOWS diff --git a/server/build.gradle b/server/build.gradle index c71cc4c7dbd..deb38398979 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -106,7 +106,7 @@ dependencies { compile 'com.carrotsearch:hppc:0.7.1' // time handling, remove with java 8 time - compile 'joda-time:joda-time:2.9.9' + compile 'joda-time:joda-time:2.10' // percentiles aggregation compile 'com.tdunning:t-digest:3.2' diff --git a/server/licenses/joda-time-2.10.jar.sha1 b/server/licenses/joda-time-2.10.jar.sha1 new file mode 100644 index 00000000000..a597eabc654 --- /dev/null +++ b/server/licenses/joda-time-2.10.jar.sha1 @@ -0,0 +1 @@ +f66c8125d1057ffce6c4e29e624cac863e110e2b \ No newline at end of file diff --git a/server/licenses/joda-time-2.9.9.jar.sha1 b/server/licenses/joda-time-2.9.9.jar.sha1 deleted file mode 100644 index 
4009932ea3b..00000000000 --- a/server/licenses/joda-time-2.9.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b520c458572890807d143670c9b24f4de90897 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/action/AliasesRequest.java b/server/src/main/java/org/elasticsearch/action/AliasesRequest.java index bf7ceb28d50..419287f28eb 100644 --- a/server/src/main/java/org/elasticsearch/action/AliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/AliasesRequest.java @@ -32,6 +32,11 @@ public interface AliasesRequest extends IndicesRequest.Replaceable { */ String[] aliases(); + /** + * Returns the aliases as they were originally requested, before any potential name resolution + */ + String[] getOriginalAliases(); + /** * Replaces current aliases with the provided aliases. * diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java index 19b0517d96c..173a44b67cd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterGetSettingsResponse.java @@ -48,7 +48,6 @@ public class ClusterGetSettingsResponse extends ActionResponse implements ToXCon static final String TRANSIENT_FIELD = "transient"; static final String DEFAULTS_FIELD = "defaults"; - @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "cluster_get_settings_response", diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 9249550871c..22e8554ed6a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -214,6 +214,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest aliases = new HashSet<>(); for (AliasActions action : actions) { String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices()); - Collections.addAll(aliases, action.aliases()); + Collections.addAll(aliases, action.getOriginalAliases()); for (String index : concreteIndices) { switch (action.actionType()) { case ADD: @@ -142,7 +142,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction> aliasMetaData = metaData.findAliases(action.aliases(), indexAsArray); + ImmutableOpenMap> aliasMetaData = metaData.findAliases(action, indexAsArray); List finalAliases = new ArrayList<>(); for (ObjectCursor> curAliases : aliasMetaData.values()) { for (AliasMetaData aliasMeta: curAliases.value) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java index 1bacd652ee7..2b71e85a537 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/TransportGetAliasesAction.java @@ -63,7 +63,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction listener) { String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request); - ImmutableOpenMap> aliases = state.metaData().findAliases(request.aliases(), concreteIndices); + ImmutableOpenMap> aliases = state.metaData().findAliases(request, concreteIndices); listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases))); } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java index 060c345454a..584ad0bc55a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java @@ -32,15 +32,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.common.settings.IndexScopedSettings; import java.io.IOException; import java.util.List; @@ -110,7 +109,7 @@ public class TransportGetIndexAction extends TransportClusterInfoAction PARSER = new ConstructingObjectParser<>( "query_explanation", true, diff --git a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java index 9ee59cf70d0..455aab7f6e3 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetResponse.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetResponse.java @@ -129,7 +129,6 @@ public class GetResponse extends ActionResponse implements Iterable getSourceAsMap() throws ElasticsearchParseException { return getResult.sourceAsMap(); } diff --git 
a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index d15b7b92d62..e0a6cd82786 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.get; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -90,9 +89,9 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId, item.type(), item.id()), e); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 101ce7ec260..3f41aaddfb7 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -32,8 +32,8 @@ import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class SimulateProcessorResult implements Writeable, ToXContentObject { @@ -42,7 +42,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject { private final WriteableIngestDocument ingestDocument; private final Exception failure; - 
@SuppressWarnings("unchecked") private static final ConstructingObjectParser IGNORED_ERROR_PARSER = new ConstructingObjectParser<>( "ignored_error_parser", @@ -57,7 +56,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject { ); } - @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "simulate_processor_result", diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 2430868bb59..6331097024c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -94,7 +94,6 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment { ); } - @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "writeable_ingest_document", diff --git a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java index cdd895ff8cd..8598ab3e4be 100644 --- a/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java +++ b/server/src/main/java/org/elasticsearch/action/support/ActiveShardCount.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support; import com.carrotsearch.hppc.cursors.IntObjectCursor; + import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -205,7 +206,7 @@ public final class ActiveShardCount implements Writeable { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") ActiveShardCount that = (ActiveShardCount) o; + ActiveShardCount that = (ActiveShardCount) o; return value == that.value; } 
diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 9dc7a899d03..7b137fb418c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -72,7 +72,6 @@ public abstract class ReplicationRequestBuilder new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), t); + logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), e); response.add(request.locations.get(i), - new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t)); + new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e)); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index 138788251c9..066f00c2cd1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState.Custom; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -165,7 +166,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") Entry entry = (Entry) o; + Entry entry = (Entry) o; return snapshot.equals(entry.snapshot) && state 
== entry.state && indices.equals(entry.indices) && @@ -291,7 +292,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements return false; } - @SuppressWarnings("unchecked") ShardRestoreStatus status = (ShardRestoreStatus) o; + ShardRestoreStatus status = (ShardRestoreStatus) o; return state == status.state && Objects.equals(nodeId, status.nodeId) && Objects.equals(reason, status.reason); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index 74789aada3a..3bb9d42a578 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -161,7 +161,6 @@ public final class IndexGraveyard implements MetaData.Custom { } @Override - @SuppressWarnings("unchecked") public Diff diff(final MetaData.Custom previous) { return new IndexGraveyardDiff((IndexGraveyard) previous, this); } @@ -321,7 +320,7 @@ public final class IndexGraveyard implements MetaData.Custom { @Override public IndexGraveyard apply(final MetaData.Custom previous) { - @SuppressWarnings("unchecked") final IndexGraveyard old = (IndexGraveyard) previous; + final IndexGraveyard old = (IndexGraveyard) previous; if (removedCount > old.tombstones.size()) { throw new IllegalStateException("IndexGraveyardDiff cannot remove [" + removedCount + "] entries from [" + old.tombstones.size() + "] tombstones."); @@ -416,7 +415,7 @@ public final class IndexGraveyard implements MetaData.Custom { if (other == null || getClass() != other.getClass()) { return false; } - @SuppressWarnings("unchecked") Tombstone that = (Tombstone) other; + Tombstone that = (Tombstone) other; return index.equals(that.index) && deleteDateInMillis == that.deleteDateInMillis; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 90380205012..18b89db72a3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.action.support.ActiveShardCount; @@ -685,7 +686,6 @@ public class IndexMetaData implements Diffable, ToXContentFragmen return lookupPrototypeSafe(key).readFrom(in); } - @SuppressWarnings("unchecked") @Override public Diff readDiff(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readDiffFrom(in); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index ae58d2885bb..d35a4baa1e6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -381,7 +381,6 @@ public class IndexTemplateMetaData extends AbstractDiffable, Diffable, To private final SortedMap aliasAndIndexLookup; - @SuppressWarnings("unchecked") MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings, ImmutableOpenMap indices, ImmutableOpenMap templates, ImmutableOpenMap customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices, @@ -248,21 +249,53 @@ public class MetaData implements Iterable, Diffable, To } /** - * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and - * that point to 
the specified concrete indices or match partially with the indices via wildcards. + * Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards. * - * @param aliases The names of the index aliases to find * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are * present for that index */ - public ImmutableOpenMap> findAliases(final String[] aliases, String[] concreteIndices) { + public ImmutableOpenMap> findAllAliases(String[] concreteIndices) { + return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices); + } + + /** + * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and + * that point to the specified concrete indices or match partially with the indices via wildcards. + * + * @param aliasesRequest The request to find aliases for + * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. + * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are + * present for that index + */ + public ImmutableOpenMap> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) { + return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices); + } + + /** + * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and + * that point to the specified concrete indices or match partially with the indices via wildcards. + * + * @param aliases The aliases to look for + * @param originalAliases The original aliases that the user originally requested + * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. 
+ * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are + * present for that index + */ + private ImmutableOpenMap> findAliases(String[] originalAliases, String[] aliases, + String[] concreteIndices) { assert aliases != null; + assert originalAliases != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } + //if aliases were provided but they got replaced with empty aliases, return empty map + if (originalAliases.length > 0 && aliases.length == 0) { + return ImmutableOpenMap.of(); + } + boolean matchAllAliases = matchAllAliases(aliases); ImmutableOpenMap.Builder> mapBuilder = ImmutableOpenMap.builder(); for (String index : concreteIndices) { @@ -967,7 +1000,7 @@ public class MetaData implements Iterable, Diffable, To } public IndexGraveyard indexGraveyard() { - @SuppressWarnings("unchecked") IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE); + IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE); return graveyard; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index ff7aab4a256..13cb85ea399 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -217,7 +217,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { return false; } - @SuppressWarnings("unchecked") SnapshotRecoverySource that = (SnapshotRecoverySource) o; + SnapshotRecoverySource that = (SnapshotRecoverySource) o; return snapshot.equals(that.snapshot) && index.equals(that.index) && version.equals(that.version); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java index 850e8c9c142..7ce971958c9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AbstractAllocationDecision.java @@ -175,7 +175,7 @@ public abstract class AbstractAllocationDecision implements ToXContentFragment, if (other == null || other instanceof AbstractAllocationDecision == false) { return false; } - @SuppressWarnings("unchecked") AbstractAllocationDecision that = (AbstractAllocationDecision) other; + AbstractAllocationDecision that = (AbstractAllocationDecision) other; return Objects.equals(targetNode, that.targetNode) && Objects.equals(nodeDecisions, that.nodeDecisions); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java index fc2d81b38c4..c32d3e1518d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocateUnassignedDecision.java @@ -316,7 +316,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision { if (other instanceof AllocateUnassignedDecision == false) { return false; } - @SuppressWarnings("unchecked") AllocateUnassignedDecision that = (AllocateUnassignedDecision) other; + AllocateUnassignedDecision that = (AllocateUnassignedDecision) other; return Objects.equals(allocationStatus, that.allocationStatus) && Objects.equals(allocationId, that.allocationId) && reuseStore == that.reuseStore diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java index de9795ff4c2..9439187d739 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/MoveDecision.java @@ -300,7 +300,7 @@ public final class MoveDecision extends AbstractAllocationDecision { if (other instanceof MoveDecision == false) { return false; } - @SuppressWarnings("unchecked") MoveDecision that = (MoveDecision) other; + MoveDecision that = (MoveDecision) other; return Objects.equals(allocationDecision, that.allocationDecision) && Objects.equals(canRemainDecision, that.canRemainDecision) && Objects.equals(clusterRebalanceDecision, that.clusterRebalanceDecision) diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index de14e0cd53d..3c4b35d5c34 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -54,7 +54,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple listeners.remove(listener); } - @SuppressWarnings({"unchecked"}) @Override public void start() { if (!lifecycle.canMoveToStarted()) { @@ -72,7 +71,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple protected abstract void doStart(); - @SuppressWarnings({"unchecked"}) @Override public void stop() { if (!lifecycle.canMoveToStopped()) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java b/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java index ce63da62d8d..dfc216028c1 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java +++ b/server/src/main/java/org/elasticsearch/common/inject/ConstructorInjectorStore.java @@ -32,7 +32,6 @@ class ConstructorInjectorStore { private final 
FailableCache, ConstructorInjector> cache = new FailableCache, ConstructorInjector>() { @Override - @SuppressWarnings("unchecked") protected ConstructorInjector create(TypeLiteral type, Errors errors) throws ErrorsException { return createConstructor(type, errors); diff --git a/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java index e42082817c1..e2963864085 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/TypeConverterBindingProcessor.java @@ -101,7 +101,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor { }, new TypeConverter() { @Override - @SuppressWarnings("unchecked") public Object convert(String value, TypeLiteral toType) { try { return Class.forName(value); @@ -128,7 +127,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor { TypeConverter typeConverter = new TypeConverter() { @Override - @SuppressWarnings("unchecked") public Object convert(String value, TypeLiteral toType) { try { return parser.invoke(null, value); diff --git a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java index cb434a90369..d676b19dddb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/assistedinject/AssistedConstructor.java @@ -42,7 +42,6 @@ class AssistedConstructor { private final ParameterListKey assistedParameters; private final List allParameters; - @SuppressWarnings("unchecked") AssistedConstructor(Constructor constructor, List> parameterTypes) { this.constructor = constructor; diff --git 
a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java index 0cfafc4a30a..349935ac7c4 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/ProviderMethod.java @@ -97,7 +97,7 @@ public class ProviderMethod implements ProviderWithDependencies { try { // We know this cast is safe because T is the method's return type. - @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"}) + @SuppressWarnings({"unchecked"}) T result = (T) method.invoke(instance, parameters); return result; } catch (IllegalAccessException e) { diff --git a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java index 5bc1595be5f..5447f2ca399 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/multibindings/Multibinder.java @@ -220,7 +220,6 @@ public abstract class Multibinder { } @Override - @SuppressWarnings("unchecked") public void configure(Binder binder) { checkConfiguration(!isInitialized(), "Multibinder was already initialized"); diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java index 75a3b615a10..0e4f7a80131 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultBindingTargetVisitor.java @@ -78,8 +78,7 @@ public abstract class DefaultBindingTargetVisitor implements BindingTarget // javac says it's an error to cast ProviderBinding to Binding @Override - @SuppressWarnings("unchecked") public V 
visit(ProviderBinding providerBinding) { - return visitOther((Binding) providerBinding); + return visitOther(providerBinding); } } diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index b11aa9d4a96..d7879b0d928 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -518,7 +518,6 @@ public abstract class StreamInput extends InputStream { return (Map) readGenericValue(); } - @SuppressWarnings({"unchecked"}) @Nullable public Object readGenericValue() throws IOException { byte type = readByte(); diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 021e97767d8..a3f83609037 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -178,7 +178,6 @@ public class GetResult implements Streamable, Iterable, ToXConten /** * The source of the document (As a map). 
*/ - @SuppressWarnings({"unchecked"}) public Map sourceAsMap() throws ElasticsearchParseException { if (source == null) { return null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 87942260742..a0640ac68a9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -197,7 +197,6 @@ public class DocumentMapper implements ToXContentFragment { return mapping.root; } - @SuppressWarnings({"unchecked"}) public T metadataMapper(Class type) { return mapping.metadataMapper(type); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 936e7334002..921e472c94f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -427,7 +427,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { // the master node restoring mappings from disk or data nodes // deserializing cluster state that was sent by the master node, // this check will be skipped. 
- checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size()); + checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() + fieldAliasMappers.size()); } results.put(newMapper.type(), newMapper); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index 52dfadfe273..a6a5fab0d04 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -230,7 +230,9 @@ public class TypeParsers { } else { throw new MapperParsingException("no type specified for property [" + multiFieldName + "]"); } - if (type.equals(ObjectMapper.CONTENT_TYPE) || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) { + if (type.equals(ObjectMapper.CONTENT_TYPE) + || type.equals(ObjectMapper.NESTED_CONTENT_TYPE) + || type.equals(FieldAliasMapper.CONTENT_TYPE)) { throw new MapperParsingException("Type [" + type + "] cannot be used in multi field"); } diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index ebebcfd253c..7e6a9c29a83 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -37,6 +37,7 @@ import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; /** * CircuitBreakerService that attempts to redistribute space between breakers @@ -215,7 +216,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { } // Manually add the parent breaker settings since they aren't part of the breaker map allStats.add(new 
CircuitBreakerStats(CircuitBreaker.PARENT, parentSettings.getLimit(), - parentUsed(0L), 1.0, parentTripCount.get())); + parentUsed(0L).totalUsage, 1.0, parentTripCount.get())); return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()])); } @@ -225,15 +226,26 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { return new CircuitBreakerStats(breaker.getName(), breaker.getLimit(), breaker.getUsed(), breaker.getOverhead(), breaker.getTrippedCount()); } - private long parentUsed(long newBytesReserved) { + private static class ParentMemoryUsage { + final long baseUsage; + final long totalUsage; + + ParentMemoryUsage(final long baseUsage, final long totalUsage) { + this.baseUsage = baseUsage; + this.totalUsage = totalUsage; + } + } + + private ParentMemoryUsage parentUsed(long newBytesReserved) { if (this.trackRealMemoryUsage) { - return currentMemoryUsage() + newBytesReserved; + final long current = currentMemoryUsage(); + return new ParentMemoryUsage(current, current + newBytesReserved); } else { long parentEstimated = 0; for (CircuitBreaker breaker : this.breakers.values()) { parentEstimated += breaker.getUsed() * breaker.getOverhead(); } - return parentEstimated; + return new ParentMemoryUsage(parentEstimated, parentEstimated); } } @@ -246,15 +258,37 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { * Checks whether the parent breaker has been tripped */ public void checkParentLimit(long newBytesReserved, String label) throws CircuitBreakingException { - long totalUsed = parentUsed(newBytesReserved); + final ParentMemoryUsage parentUsed = parentUsed(newBytesReserved); long parentLimit = this.parentSettings.getLimit(); - if (totalUsed > parentLimit) { + if (parentUsed.totalUsage > parentLimit) { this.parentTripCount.incrementAndGet(); - final String message = "[parent] Data too large, data for [" + label + "]" + - " would be [" + totalUsed + "/" + new ByteSizeValue(totalUsed) + 
"]" + + final StringBuilder message = new StringBuilder("[parent] Data too large, data for [" + label + "]" + + " would be [" + parentUsed.totalUsage + "/" + new ByteSizeValue(parentUsed.totalUsage) + "]" + ", which is larger than the limit of [" + - parentLimit + "/" + new ByteSizeValue(parentLimit) + "]"; - throw new CircuitBreakingException(message, totalUsed, parentLimit); + parentLimit + "/" + new ByteSizeValue(parentLimit) + "]"); + if (this.trackRealMemoryUsage) { + final long realUsage = parentUsed.baseUsage; + message.append(", real usage: ["); + message.append(realUsage); + message.append("/"); + message.append(new ByteSizeValue(realUsage)); + message.append("], new bytes reserved: ["); + message.append(newBytesReserved); + message.append("/"); + message.append(new ByteSizeValue(newBytesReserved)); + message.append("]"); + } else { + message.append(", usages ["); + message.append(String.join(", ", + this.breakers.entrySet().stream().map(e -> { + final CircuitBreaker breaker = e.getValue(); + final long breakerUsed = (long)(breaker.getUsed() * breaker.getOverhead()); + return e.getKey() + "=" + breakerUsed + "/" + new ByteSizeValue(breakerUsed); + }) + .collect(Collectors.toList()))); + message.append("]"); + } + throw new CircuitBreakingException(message.toString(), parentUsed.totalUsage, parentLimit); } } diff --git a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java index 87ea08dc74d..59523f33901 100644 --- a/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java +++ b/server/src/main/java/org/elasticsearch/persistent/NodePersistentTasksExecutor.java @@ -45,7 +45,6 @@ public class NodePersistentTasksExecutor { task.markAsFailed(e); } - @SuppressWarnings("unchecked") @Override protected void doRun() throws Exception { try { diff --git 
a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java index 9ed0af010b5..4cb8c722f26 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksClusterService.java @@ -85,7 +85,6 @@ public class PersistentTasksClusterService extends AbstractComponent implements listener.onFailure(e); } - @SuppressWarnings("unchecked") @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { PersistentTasksCustomMetaData tasks = newState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java index 2ac57e074b7..a8f9c73ab32 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksExecutorRegistry.java @@ -33,7 +33,6 @@ public class PersistentTasksExecutorRegistry extends AbstractComponent { private final Map> taskExecutors; - @SuppressWarnings("unchecked") public PersistentTasksExecutorRegistry(Settings settings, Collection> taskExecutors) { super(settings); Map> map = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/repositories/IndexId.java b/server/src/main/java/org/elasticsearch/repositories/IndexId.java index 469caa26b64..2a3d9f15d16 100644 --- a/server/src/main/java/org/elasticsearch/repositories/IndexId.java +++ b/server/src/main/java/org/elasticsearch/repositories/IndexId.java @@ -89,7 +89,7 @@ public final class IndexId implements Writeable, ToXContentObject { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") IndexId that = (IndexId) o; 
+ IndexId that = (IndexId) o; return Objects.equals(name, that.name) && Objects.equals(id, that.id); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 7a8d8327d5e..a97cf4bb419 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -238,7 +238,7 @@ public final class RepositoryData { if (obj == null || getClass() != obj.getClass()) { return false; } - @SuppressWarnings("unchecked") RepositoryData that = (RepositoryData) obj; + RepositoryData that = (RepositoryData) obj; return snapshotIds.equals(that.snapshotIds) && snapshotStates.equals(that.snapshotStates) && indices.equals(that.indices) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 1428a31a8de..e93266db805 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -247,7 +247,6 @@ public class InternalComposite this.formats = formats; } - @SuppressWarnings("unchecked") InternalBucket(StreamInput in, List sourceNames, List formats, int[] reverseMuls) throws IOException { this.key = new CompositeKey(in); this.docCount = in.readVLong(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index d998beedf14..c490b344bdb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java 
@@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -358,7 +357,6 @@ public class RangeAggregator extends BucketsAggregator { private final InternalRange.Factory factory; private final DocValueFormat format; - @SuppressWarnings("unchecked") public Unmapped(String name, R[] ranges, boolean keyed, DocValueFormat format, SearchContext context, Aggregator parent, InternalRange.Factory factory, List pipelineAggregators, Map metaData) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java index d49da4658ae..185e1c63b98 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java @@ -62,7 +62,6 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation private static final Function> PARSER = name -> { - @SuppressWarnings("unchecked") ConstructingObjectParser parser = new ConstructingObjectParser<>( MovFnPipelineAggregationBuilder.NAME, false, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index b33ba879660..0e354e14a37 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -54,7 +54,6 @@ public class ValuesSourceConfig { if (field == null) { if (script == null) { - @SuppressWarnings("unchecked") ValuesSourceConfig config = new ValuesSourceConfig<>(ValuesSourceType.ANY); config.format(resolveFormat(null, valueType)); return config; diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 30f1dfb14fc..6adad6dabf0 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -90,7 +90,6 @@ public class GeoDistanceSortBuilder extends SortBuilder private DistanceUnit unit = DistanceUnit.DEFAULT; private SortMode sortMode = null; - @SuppressWarnings("rawtypes") private QueryBuilder nestedFilter; private String nestedPath; diff --git a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java index 9537e288919..9d2a5c9f1e2 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java @@ -22,9 +22,7 @@ package org.elasticsearch.search.sort; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToChildBlockJoinQuery; -import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -186,10 +184,21 @@ public abstract class SortBuilder> implements NamedWrit } protected static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort) throws 
IOException { - return resolveNested(context, nestedSort, null); + final Query childQuery = resolveNestedQuery(context, nestedSort, null); + if (childQuery == null) { + return null; + } + final ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); + final Query parentQuery; + if (objectMapper == null) { + parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); + } else { + parentQuery = objectMapper.nestedTypeFilter(); + } + return new Nested(context.bitsetFilter(parentQuery), childQuery); } - private static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort, Nested nested) throws IOException { + private static Query resolveNestedQuery(QueryShardContext context, NestedSortBuilder nestedSort, Query parentQuery) throws IOException { if (nestedSort == null || nestedSort.getPath() == null) { return null; } @@ -207,15 +216,7 @@ public abstract class SortBuilder> implements NamedWrit if (!nestedObjectMapper.nested().isNested()) { throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type"); } - - // get our parent query which will determines our parent documents - Query parentQuery; ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); - if (objectMapper == null) { - parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); - } else { - parentQuery = objectMapper.nestedTypeFilter(); - } // get our child query, potentially applying a users filter Query childQuery; @@ -223,7 +224,7 @@ public abstract class SortBuilder> implements NamedWrit context.nestedScope().nextLevel(nestedObjectMapper); if (nestedFilter != null) { assert nestedFilter == Rewriteable.rewrite(nestedFilter, context) : "nested filter is not rewritten"; - if (nested == null) { + if (parentQuery == null) { // this is for back-compat, original single level nested sorting never applied a nested type filter childQuery = nestedFilter.toFilter(context); } else { @@ 
-237,27 +238,23 @@ public abstract class SortBuilder> implements NamedWrit } // apply filters from the previous nested level - if (nested != null) { - parentQuery = Queries.filtered(parentQuery, - new ToParentBlockJoinQuery(nested.getInnerQuery(), nested.getRootFilter(), ScoreMode.None)); - + if (parentQuery != null) { if (objectMapper != null) { childQuery = Queries.filtered(childQuery, - new ToChildBlockJoinQuery(nested.getInnerQuery(), context.bitsetFilter(objectMapper.nestedTypeFilter()))); + new ToChildBlockJoinQuery(parentQuery, context.bitsetFilter(objectMapper.nestedTypeFilter()))); } } // wrap up our parent and child and either process the next level of nesting or return - final Nested innerNested = new Nested(context.bitsetFilter(parentQuery), childQuery); if (nestedNestedSort != null) { try { context.nestedScope().nextLevel(nestedObjectMapper); - return resolveNested(context, nestedNestedSort, innerNested); + return resolveNestedQuery(context, nestedNestedSort, childQuery); } finally { context.nestedScope().previousLevel(); } } else { - return innerNested; + return childQuery; } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java index 314cd4053dd..2847af386b2 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java +++ b/server/src/main/java/org/elasticsearch/snapshots/Snapshot.java @@ -80,7 +80,7 @@ public final class Snapshot implements Writeable { if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") Snapshot that = (Snapshot) o; + Snapshot that = (Snapshot) o; return repository.equals(that.repository) && snapshotId.equals(that.snapshotId); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java index b80dfd94d75..7a8848618c2 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java +++ 
b/server/src/main/java/org/elasticsearch/snapshots/SnapshotId.java @@ -22,7 +22,6 @@ package org.elasticsearch.snapshots; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -98,7 +97,7 @@ public final class SnapshotId implements Comparable, Writeable, ToXC if (o == null || getClass() != o.getClass()) { return false; } - @SuppressWarnings("unchecked") final SnapshotId that = (SnapshotId) o; + final SnapshotId that = (SnapshotId) o; return name.equals(that.name) && uuid.equals(that.uuid); } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index 51a4adec8d1..2d3be2435b4 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -21,7 +21,6 @@ package org.elasticsearch.threadpool; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Counter; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; @@ -38,6 +37,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.XRejectedExecutionHandler; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.node.Node; import java.io.Closeable; @@ -197,7 +197,7 @@ public class ThreadPool extends AbstractComponent implements 
Scheduler, Closeabl threadContext = new ThreadContext(settings); final Map executors = new HashMap<>(); - for (@SuppressWarnings("unchecked") final Map.Entry entry : builders.entrySet()) { + for (final Map.Entry entry : builders.entrySet()) { final ExecutorBuilder.ExecutorSettings executorSettings = entry.getValue().getSettings(settings); final ExecutorHolder executorHolder = entry.getValue().build(executorSettings, threadContext); if (executors.containsKey(executorHolder.info.getName())) { @@ -338,6 +338,7 @@ public class ThreadPool extends AbstractComponent implements Scheduler, Closeabl * the ScheduledFuture will cannot interact with it. * @throws org.elasticsearch.common.util.concurrent.EsRejectedExecutionException if the task cannot be scheduled for execution */ + @Override public ScheduledFuture schedule(TimeValue delay, String executor, Runnable command) { if (!Names.SAME.equals(executor)) { command = new ThreadedRunnable(command, executor(executor)); @@ -358,6 +359,7 @@ public class ThreadPool extends AbstractComponent implements Scheduler, Closeabl command, executor), e)); } + @Override public Runnable preserveContext(Runnable command) { return getThreadContext().preserveContext(command); } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java index 447bbd92dd2..fbe477ad04b 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportResponseHandler.java @@ -40,7 +40,6 @@ public interface TransportResponseHandler extends W * * @return the deserialized response. 
*/ - @SuppressWarnings("deprecation") @Override default T read(StreamInput in) throws IOException { T instance = newInstance(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 264a92137be..f53eb63bc10 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -153,7 +153,6 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { assertEquals(0, common.refresh.getListeners()); } - @SuppressWarnings("unchecked") public void testUuidOnRootStatsIndices() { String uuid = createIndex("test").indexUUID(); IndicesStatsResponse rsp = client().admin().indices().prepareStats().get(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 38e3fcc6ea7..9d82e9e1cdc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -41,6 +42,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ -50,6 +52,63 @@ import static org.hamcrest.Matchers.startsWith; public class MetaDataTests extends ESTestCase { + public void testFindAliases() { + MetaData metaData = MetaData.builder().put(IndexMetaData.builder("index") + 
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias1").build()) + .putAlias(AliasMetaData.builder("alias2").build())).build(); + + { + ImmutableOpenMap> aliases = metaData.findAliases(new GetAliasesRequest(), Strings.EMPTY_ARRAY); + assertThat(aliases.size(), equalTo(0)); + } + { + ImmutableOpenMap> aliases = metaData.findAliases(new GetAliasesRequest(), new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), equalTo(2)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); + } + { + GetAliasesRequest getAliasesRequest = new GetAliasesRequest("alias1"); + getAliasesRequest.replaceAliases(Strings.EMPTY_ARRAY); + ImmutableOpenMap> aliases = metaData.findAliases(getAliasesRequest, new String[]{"index"}); + assertThat(aliases.size(), equalTo(0)); + } + { + ImmutableOpenMap> aliases = + metaData.findAliases(new GetAliasesRequest("alias*"), new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), equalTo(2)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); + } + { + ImmutableOpenMap> aliases = + metaData.findAliases(new GetAliasesRequest("alias1"), new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), equalTo(1)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + } + { + ImmutableOpenMap> aliases = metaData.findAllAliases(new String[]{"index"}); + assertThat(aliases.size(), equalTo(1)); + List aliasMetaDataList = aliases.get("index"); + assertThat(aliasMetaDataList.size(), 
equalTo(2)); + assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1")); + assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2")); + } + { + ImmutableOpenMap> aliases = metaData.findAllAliases(Strings.EMPTY_ARRAY); + assertThat(aliases.size(), equalTo(0)); + } + } + public void testIndexAndAliasWithSameName() { IndexMetaData.Builder builder = IndexMetaData.builder("index") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -613,7 +672,6 @@ public class MetaDataTests extends ESTestCase { public static void assertLeafs(Map properties, String... fields) { for (String field : fields) { assertTrue(properties.containsKey(field)); - @SuppressWarnings("unchecked") Map fieldProp = (Map)properties.get(field); assertNotNull(fieldProp); assertFalse(fieldProp.containsKey("properties")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java index 268b03d046c..8e164c86ebe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperParserTests.java @@ -76,4 +76,28 @@ public class DocumentMapperParserTests extends ESSingleNodeTestCase { mapperParser.parse("type", new CompressedXContent(mapping))); assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] of different type")); } + + public void testMultiFieldsWithFieldAlias() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "text") + .startObject("fields") + .startObject("alias") + .field("type", "alias") + .field("path", "other-field") + .endObject() + 
.endObject() + .endObject() + .startObject("other-field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject().endObject()); + MapperParsingException e = expectThrows(MapperParsingException.class, () -> + mapperParser.parse("type", new CompressedXContent(mapping))); + assertEquals("Type [alias] cannot be used in multi field", e.getMessage()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 20e0dd4639c..51b6e9d7168 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -270,6 +270,37 @@ public class MapperServiceTests extends ESSingleNodeTestCase { assertThat(e.getMessage(), containsString("Invalid [path] value [nested.field] for field alias [alias]")); } + public void testTotalFieldsLimitWithFieldAlias() throws Throwable { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("alias") + .field("type", "alias") + .field("path", "field") + .endObject() + .startObject("field") + .field("type", "text") + .endObject() + .endObject() + .endObject().endObject()); + + DocumentMapper documentMapper = createIndex("test1").mapperService() + .merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + // Set the total fields limit to the number of non-alias fields, to verify that adding + // a field alias pushes the mapping over the limit. 
+ int numFields = documentMapper.mapping().metadataMappers.length + 2; + int numNonAliasFields = numFields - 1; + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { + Settings settings = Settings.builder() + .put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), numNonAliasFields) + .build(); + createIndex("test2", settings).mapperService() + .merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + }); + assertEquals("Limit of total fields [" + numNonAliasFields + "] in index [test2] has been exceeded", e.getMessage()); + } + public void testForbidMultipleTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 4b9e0f5a66e..591ee5af080 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -42,7 +42,6 @@ import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; @@ -51,7 +50,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import 
org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -74,7 +72,6 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQuery import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDisjunctionSubQuery; -import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -381,11 +378,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase> books = new ArrayList<>(); + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 743)); + document.add(new IntPoint("chapters.paragraphs.word_count", 743)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 3", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 400)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 400)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new 
TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 234)); + document.add(new IntPoint("chapters.paragraphs.word_count", 234)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 2", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 200)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 200)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 2", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 478)); + document.add(new IntPoint("chapters.paragraphs.word_count", 478)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 849)); + document.add(new IntPoint("chapters.paragraphs.word_count", 849)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 1400)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 1400)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "science fiction", Field.Store.NO)); + 
document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "1", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "Introduction", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 76)); + document.add(new IntPoint("chapters.paragraphs.word_count", 76)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "chapter 1", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 20)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 20)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "romance", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "2", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "A bad dream", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 976)); + document.add(new IntPoint("chapters.paragraphs.word_count", 976)); + book.add(document); + document = new Document(); + 
document.add(new TextField("chapters.title", "The beginning of the end", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 1200)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 1200)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "horror", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "3", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "macaroni", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 180)); + document.add(new IntPoint("chapters.paragraphs.word_count", 180)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "hamburger", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 150)); + document.add(new IntPoint("chapters.paragraphs.word_count", 150)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "tosti", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new 
SortedNumericDocValuesField("chapters.paragraphs.word_count", 120)); + document.add(new IntPoint("chapters.paragraphs.word_count", 120)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "easy meals", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 800)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 800)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.paragraphs.header", "introduction", Field.Store.NO)); + document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO)); + document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO)); + document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 87)); + document.add(new IntPoint("chapters.paragraphs.word_count", 87)); + book.add(document); + document = new Document(); + document.add(new TextField("chapters.title", "introduction", Field.Store.NO)); + document.add(new StringField("_type", "__chapters", Field.Store.NO)); + document.add(new IntPoint("chapters.read_time_seconds", 10)); + document.add(new NumericDocValuesField("chapters.read_time_seconds", 10)); + book.add(document); + document = new Document(); + document.add(new StringField("genre", "cooking", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "4", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + book.add(document); + books.add(book); + } + { + List book = new ArrayList<>(); + Document document = new Document(); + document.add(new StringField("genre", "unknown", Field.Store.NO)); + document.add(new StringField("_type", "_doc", Field.Store.NO)); + document.add(new StringField("_id", "5", Field.Store.YES)); + document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0)); + 
book.add(document); + books.add(book); + } + + Collections.shuffle(books, random()); + for (List book : books) { + writer.addDocuments(book); + if (randomBoolean()) { + writer.commit(); + } + } + DirectoryReader reader = DirectoryReader.open(writer); + reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0)); + IndexSearcher searcher = new IndexSearcher(reader); + QueryShardContext queryShardContext = indexService.newQueryShardContext(0, reader, () -> 0L, null); + + FieldSortBuilder sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs"))); + QueryBuilder queryBuilder = new MatchAllQueryBuilder(); + TopFieldDocs topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(5L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L)); + assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L)); + assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE)); + + // Specific genre + { + queryBuilder = new TermQueryBuilder("genre", "romance"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + 
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + + queryBuilder = new TermQueryBuilder("genre", "science fiction"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L)); + + queryBuilder = new TermQueryBuilder("genre", "horror"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); + + queryBuilder = new TermQueryBuilder("genre", "cooking"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + } + + // reverse sort order + { + sortBuilder.order(SortOrder.DESC); + queryBuilder = new MatchAllQueryBuilder(); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(5L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L)); + assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L)); + assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L)); + 
assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5")); + assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE)); + } + + // Specific genre and reverse sort order + { + queryBuilder = new TermQueryBuilder("genre", "romance"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + + queryBuilder = new TermQueryBuilder("genre", "science fiction"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L)); + + queryBuilder = new TermQueryBuilder("genre", "horror"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L)); + + queryBuilder = new TermQueryBuilder("genre", "cooking"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L)); + } + + // Nested filter + query + { + queryBuilder = new RangeQueryBuilder("chapters.read_time_seconds").to(50L); + sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort( + new NestedSortBuilder("chapters") + .setFilter(queryBuilder) + .setNestedSort(new NestedSortBuilder("chapters.paragraphs")) + ); + topFields = search(new 
NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L)); + + sortBuilder.order(SortOrder.DESC); + topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L)); + } + + // Multiple Nested filters + query + { + queryBuilder = new RangeQueryBuilder("chapters.read_time_seconds").to(50L); + sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort( + new NestedSortBuilder("chapters") + .setFilter(queryBuilder) + .setNestedSort( + new NestedSortBuilder("chapters.paragraphs") + .setFilter(new RangeQueryBuilder("chapters.paragraphs.word_count").from(80L)) + ) + ); + topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE)); + + sortBuilder.order(SortOrder.DESC); + topFields = 
search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(2L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE)); + } + + // Nested filter + Specific genre + { + sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count"); + sortBuilder.setNestedSort( + new NestedSortBuilder("chapters") + .setFilter(new RangeQueryBuilder("chapters.read_time_seconds").to(50L)) + .setNestedSort(new NestedSortBuilder("chapters.paragraphs")) + ); + + queryBuilder = new TermQueryBuilder("genre", "romance"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L)); + + queryBuilder = new TermQueryBuilder("genre", "science fiction"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); + + queryBuilder = new TermQueryBuilder("genre", "horror"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE)); + + queryBuilder = new TermQueryBuilder("genre", "cooking"); + topFields = search(queryBuilder, sortBuilder, queryShardContext, 
searcher); + assertThat(topFields.totalHits, equalTo(1L)); + assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4")); + assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L)); + } + } + + private static TopFieldDocs search(QueryBuilder queryBuilder, FieldSortBuilder sortBuilder, QueryShardContext queryShardContext, + IndexSearcher searcher) throws IOException { + Query query = new BooleanQuery.Builder() + .add(queryBuilder.toQuery(queryShardContext), Occur.MUST) + .add(Queries.newNonNestedFilter(Version.CURRENT), Occur.FILTER) + .build(); + Sort sort = new Sort(sortBuilder.build(queryShardContext).field); + return searcher.search(query, 10, sort); + } + } diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index a73cf8630fe..6f8689a9664 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -199,6 +199,8 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase { .addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should break")); assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [should break] would be")); assertThat(exception.getMessage(), containsString("which is larger than the limit of [209715200/200mb]")); + assertThat(exception.getMessage(), + containsString("usages [request=157286400/150mb, fielddata=54001664/51.5mb, in_flight_requests=0/0b, accounting=0/0b]")); } } @@ -239,6 +241,9 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase { // it was the parent that rejected the reservation assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [request] would be")); 
assertThat(exception.getMessage(), containsString("which is larger than the limit of [200/200b]")); + assertThat(exception.getMessage(), + containsString("real usage: [181/181b], new bytes reserved: [" + (reservationInBytes * 2) + + "/" + new ByteSizeValue(reservationInBytes * 2) + "]")); assertEquals(0, requestBreaker.getTrippedCount()); assertEquals(1, service.stats().getStats(CircuitBreaker.PARENT).getTrippedCount()); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 1abdb97f174..0137f136d3e 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -123,7 +123,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { logger.info("--> make sure the node's repository can resolve the snapshots"); final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); final List originalSnapshots = Arrays.asList(snapshotId1, snapshotId2); @@ -245,7 +245,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = + final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); assertThat("getBlobContainer has to be lazy initialized", repository.getBlobContainer(), nullValue()); return repository; diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index d216709791e..2876fbbaa25 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -32,9 +32,9 @@ import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -84,7 +84,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index bce4006fa10..e7e64027274 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -81,7 +81,6 @@ public class LongTermsIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends 
AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 4a85c2c1453..eeb6e121613 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -77,7 +78,6 @@ public class MinDocCountIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index edc29b0d2c5..99aeac167e0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -75,7 +75,6 @@ public class RangeIT extends ESIntegTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 0ed1dacb73f..b0263cb2dbd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -1746,7 +1746,6 @@ public class CompositeAggregatorTests extends AggregatorTestCase { } } - @SuppressWarnings("unchecked") private static Map createAfterKey(Object... fields) { assert fields.length % 2 == 0; final Map map = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 389371efd79..96811ce424c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -117,6 +117,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati return ParsedAutoDateHistogram.class; } + @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215") + public void testReduceRandom() { + super.testReduceRandom(); + } + @Override protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { String name = instance.getName(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 3b7e686ef4d..160e51a67b2 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -36,9 +36,6 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -103,7 +100,6 @@ public class StringTermsIT extends AbstractTermsTestCase { public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = super.pluginScripts(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index fa46921a941..f5dc01f1914 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -262,7 +262,6 @@ public class CumulativeSumAggregatorTests extends AggregatorTestCase { }); } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify) throws IOException { executeTestCase(query, aggBuilder, verify, indexWriter -> { Document document = new 
Document(); @@ -282,7 +281,6 @@ public class CumulativeSumAggregatorTests extends AggregatorTestCase { }); } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer verify, CheckedConsumer setup) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java index 88bbe3671b2..db3f2d745e1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java @@ -98,7 +98,6 @@ public class MovFnUnitTests extends AggregatorTestCase { } - @SuppressWarnings("unchecked") private void executeTestCase(Query query, DateHistogramAggregationBuilder aggBuilder, Consumer verify, diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index 2126e0e94eb..31fa4f838df 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -96,7 +96,6 @@ public class SearchFieldsIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); @@ -143,7 +142,6 @@ public class SearchFieldsIT extends ESIntegTestCase { return scripts; } - @SuppressWarnings("unchecked") static Object fieldsScript(Map vars, String fieldName) { Map fields = (Map) vars.get("_fields"); FieldLookup fieldLookup = (FieldLookup) fields.get(fieldName); @@ -156,7 +154,6 @@ public class SearchFieldsIT extends ESIntegTestCase { return XContentMapValues.extractValue(path, source); } - @SuppressWarnings("unchecked") 
static Object docScript(Map vars, String fieldName) { Map doc = (Map) vars.get("doc"); ScriptDocValues values = (ScriptDocValues) doc.get(fieldName); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 12e48a3ae4f..fc11554dfb3 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; @@ -48,8 +49,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; - -import org.elasticsearch.script.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -70,7 +69,6 @@ public class FunctionScoreIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("1", vars -> 1.0d); diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java 
b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 257089c9054..8203dac1a2d 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -67,7 +67,6 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); @@ -84,7 +83,6 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { return scripts; } - @SuppressWarnings("unchecked") static Double scoringScript(Map vars, Function scoring) { Map doc = (Map) vars.get("doc"); Double index = ((Number) ((ScriptDocValues) doc.get("index")).getValues().get(0)).doubleValue(); diff --git a/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 68ef78f4273..6d8bcfb6131 100644 --- a/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -709,6 +709,79 @@ public class SimpleNestedIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); // missing last } + // https://github.com/elastic/elasticsearch/issues/31554 + public void testLeakingSortValues() throws Exception { + assertAcked(prepareCreate("test") + .setSettings(Settings.builder().put("number_of_shards", 1)) + .addMapping("test-type", "{\n" + + " \"dynamic\": \"strict\",\n" + + " \"properties\": {\n" + + " \"nested1\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"nested2\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"nested2_keyword\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"sortVal\": {\n" + + 
" \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n", XContentType.JSON)); + ensureGreen(); + + client().prepareIndex("test", "test-type", "1").setSource("{\n" + + " \"nested1\": [\n" + + " {\n" + + " \"nested2\": [\n" + + " {\n" + + " \"nested2_keyword\": \"nested2_bar\",\n" + + " \"sortVal\": 1\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + "}", XContentType.JSON).execute().actionGet(); + + client().prepareIndex("test", "test-type", "2").setSource("{\n" + + " \"nested1\": [\n" + + " {\n" + + " \"nested2\": [\n" + + " {\n" + + " \"nested2_keyword\": \"nested2_bar\",\n" + + " \"sortVal\": 2\n" + + " }\n" + + " ]\n" + + " } \n" + + " ]\n" + + "}", XContentType.JSON).execute().actionGet(); + + refresh(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(termQuery("_id", 2)) + .addSort( + SortBuilders + .fieldSort("nested1.nested2.sortVal") + .setNestedSort(new NestedSortBuilder("nested1") + .setNestedSort(new NestedSortBuilder("nested1.nested2") + .setFilter(termQuery("nested1.nested2.nested2_keyword", "nested2_bar")))) + ) + .execute().actionGet(); + + assertHitCount(searchResponse, 1); + assertThat(searchResponse.getHits().getHits().length, equalTo(1)); + assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2")); + assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2")); + + } + public void testSortNestedWithNestedFilter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index ff0196aacdf..40d6b26b4f9 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -86,7 +86,6 @@ import static org.hamcrest.Matchers.nullValue; public class 
FieldSortIT extends ESIntegTestCase { public static class CustomScriptPlugin extends MockScriptPlugin { @Override - @SuppressWarnings("unchecked") protected Map, Object>> pluginScripts() { Map, Object>> scripts = new HashMap<>(); scripts.put("doc['number'].value", vars -> sortDoubleScript(vars)); @@ -94,14 +93,12 @@ public class FieldSortIT extends ESIntegTestCase { return scripts; } - @SuppressWarnings("unchecked") static Double sortDoubleScript(Map vars) { Map doc = (Map) vars.get("doc"); Double index = ((Number) ((ScriptDocValues) doc.get("number")).getValues().get(0)).doubleValue(); return index; } - @SuppressWarnings("unchecked") static String sortStringScript(Map vars) { Map doc = (Map) vars.get("doc"); String value = ((String) ((ScriptDocValues) doc.get("keyword")).getValues().get(0)); diff --git a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java index aa49bed6975..6668c1be0e4 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; import org.elasticsearch.test.ESIntegTestCase; @@ -50,8 +51,6 @@ import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; - -import org.elasticsearch.script.ScriptType; import static org.elasticsearch.search.sort.SortBuilders.scriptSort; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; @@ -104,7 +103,6 @@ public class SimpleSortIT extends ESIntegTestCase { /** * Return the minimal value from a set of values. */ - @SuppressWarnings("unchecked") static > T getMinValueScript(Map vars, T initialValue, String fieldName, Function converter) { T retval = initialValue; diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 439728bac9e..cef44ed17fd 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -248,7 +248,7 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase logger.info("--> verify index folder deleted from blob container"); RepositoriesService repositoriesSvc = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName()); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName()); - @SuppressWarnings("unchecked") BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName); + BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName); final SetOnce indicesBlobContainer = new SetOnce<>(); final SetOnce repositoryData = new SetOnce<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 4c00527a932..13540eaec95 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,11 +29,16 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.core.layout.PatternLayout; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; @@ -183,6 +188,8 @@ public abstract class ESTestCase extends LuceneTestCase { private static final AtomicInteger portGenerator = new AtomicInteger(); + private static final Collection nettyLoggedLeaks = new ArrayList<>(); + @AfterClass public static void resetPortCounter() { portGenerator.set(0); @@ -192,8 +199,28 @@ public abstract class ESTestCase extends LuceneTestCase { System.setProperty("log4j.shutdownHookEnabled", "false"); System.setProperty("log4j2.disable.jmx", "true"); + // Enable Netty leak detection and monitor logger for logged leak errors + System.setProperty("io.netty.leakDetection.level", "advanced"); + String leakLoggerName = "io.netty.util.ResourceLeakDetector"; + Logger leakLogger = LogManager.getLogger(leakLoggerName); + Appender leakAppender = new AbstractAppender(leakLoggerName, null, + PatternLayout.newBuilder().withPattern("%m").build()) { + @Override + public void append(LogEvent event) { + String message = event.getMessage().getFormattedMessage(); + if 
(Level.ERROR.equals(event.getLevel()) && message.contains("LEAK:")) { + synchronized (nettyLoggedLeaks) { + nettyLoggedLeaks.add(message); + } + } + } + }; + leakAppender.start(); + Loggers.addAppender(leakLogger, leakAppender); + // shutdown hook so that when the test JVM exits, logging is shutdown too Runtime.getRuntime().addShutdownHook(new Thread(() -> { + leakAppender.stop(); LoggerContext context = (LoggerContext) LogManager.getContext(false); Configurator.shutdown(context); })); @@ -440,6 +467,13 @@ public abstract class ESTestCase extends LuceneTestCase { statusData.clear(); } } + synchronized (nettyLoggedLeaks) { + try { + assertThat(nettyLoggedLeaks, empty()); + } finally { + nettyLoggedLeaks.clear(); + } + } } // this must be a separate method from other ensure checks above so suite scoped integ tests can call...TODO: fix that @@ -1037,7 +1071,6 @@ public abstract class ESTestCase extends LuceneTestCase { List targetList = new ArrayList<>(); for(Object value : list) { if (value instanceof Map) { - @SuppressWarnings("unchecked") LinkedHashMap valueMap = (LinkedHashMap) value; targetList.add(shuffleMap(valueMap, exceptFields)); } else if(value instanceof List) { @@ -1057,7 +1090,6 @@ public abstract class ESTestCase extends LuceneTestCase { for (String key : keys) { Object value = map.get(key); if (value instanceof Map && exceptFields.contains(key) == false) { - @SuppressWarnings("unchecked") LinkedHashMap valueMap = (LinkedHashMap) value; targetMap.put(key, shuffleMap(valueMap, exceptFields)); } else if(value instanceof List && exceptFields.contains(key) == false) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index cf3cc39d34d..48fc7982074 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -49,6 +49,8 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; @@ -685,7 +687,6 @@ public class ElasticsearchAssertions { /** * Compares two lists recursively, but using arrays comparisons for byte[] through Arrays.equals(byte[], byte[]) */ - @SuppressWarnings("unchecked") private static void assertListEquals(List expected, List actual) { assertEquals(expected.size(), actual.size()); Iterator actualIterator = actual.iterator(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java index 39a1f1d3780..494d65e05de 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java @@ -54,7 +54,6 @@ public class GreaterThanAssertion extends Assertion { } @Override - @SuppressWarnings("unchecked") protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is greater than [{}] (field: [{}])", actualValue, expectedValue, getField()); assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java index 
84c82f4159d..6d1e5116474 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -64,7 +64,7 @@ public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { final MockTcpTransport t = (MockTcpTransport) transport; - @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = + final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java index cf9eb5d7a8c..108411dee5b 100644 --- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java +++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleMockNioTransportTests.java @@ -96,7 +96,6 @@ public class SimpleMockNioTransportTests extends AbstractSimpleTransportTestCase @Override protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/x-pack/docs/en/security/limitations.asciidoc b/x-pack/docs/en/security/limitations.asciidoc index c127ee3d796..fb8b826d5dd 100644 --- a/x-pack/docs/en/security/limitations.asciidoc +++ b/x-pack/docs/en/security/limitations.asciidoc @@ -19,8 +19,6 @@ with {security} enabled. 
Elasticsearch clusters with {security} enabled apply the `/_all` wildcard, and all other wildcards, to the indices that the current user has privileges for, not the set of all indices on the cluster. -While creating or retrieving aliases by providing wildcard expressions for alias names, if there are no existing authorized aliases -that match the wildcard expression provided an IndexNotFoundException is returned. [float] === Multi Document APIs diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index ca926fa0d54..7ed98ccb5b4 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -20,7 +20,6 @@ esplugin { } dependencyLicenses { - mapping from: /bc.*/, to: 'bouncycastle' mapping from: /http.*/, to: 'httpclient' // pulled in by rest client mapping from: /commons-.*/, to: 'commons' // pulled in by rest client } @@ -38,8 +37,6 @@ dependencies { // security deps compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile 'org.bouncycastle:bcprov-jdk15on:1.59' - compile 'org.bouncycastle:bcpkix-jdk15on:1.59' compile project(path: ':modules:transport-netty4', configuration: 'runtime') testCompile 'org.elasticsearch:securemock:1.2' @@ -116,6 +113,7 @@ task testJar(type: Jar) { appendix 'test' from sourceSets.test.output } + artifacts { // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions archives jar diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java index 26c35db1fc9..2668e62abbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java @@ -100,7 +100,6 @@ public class SecurityClient { * Clears the realm caches. 
It's possible to clear all user entries from all realms in the cluster or alternatively * select the realms (by their unique names) and/or users (by their usernames) that should be evicted. */ - @SuppressWarnings("unchecked") public ClearRealmCacheRequestBuilder prepareClearRealmCache() { return new ClearRealmCacheRequestBuilder(client); } @@ -109,7 +108,6 @@ public class SecurityClient { * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively * select the realms (by their unique names) and/or users (by their usernames) that should be evicted. */ - @SuppressWarnings("unchecked") public void clearRealmCache(ClearRealmCacheRequest request, ActionListener listener) { client.execute(ClearRealmCacheAction.INSTANCE, request, listener); } @@ -118,7 +116,6 @@ public class SecurityClient { * Clears the realm caches. It's possible to clear all user entries from all realms in the cluster or alternatively * select the realms (by their unique names) and/or users (by their usernames) that should be evicted. 
*/ - @SuppressWarnings("unchecked") public ActionFuture clearRealmCache(ClearRealmCacheRequest request) { return client.execute(ClearRealmCacheAction.INSTANCE, request); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 1617a92b550..6503f686b64 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -63,7 +63,7 @@ public class CertParsingUtils { return PathUtils.get(path).normalize(); } - static KeyStore readKeyStore(Path path, String type, char[] password) + public static KeyStore readKeyStore(Path path, String type, char[] password) throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException { try (InputStream in = Files.newInputStream(path)) { KeyStore store = KeyStore.getInstance(type); @@ -108,7 +108,7 @@ public class CertParsingUtils { return certificates.toArray(new X509Certificate[0]); } - static List readCertificates(InputStream input) throws CertificateException, IOException { + public static List readCertificates(InputStream input) throws CertificateException, IOException { CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); Collection certificates = (Collection) certFactory.generateCertificates(input); return new ArrayList<>(certificates); @@ -140,7 +140,7 @@ public class CertParsingUtils { /** * Creates a {@link KeyStore} from a PEM encoded certificate and key file */ - static KeyStore getKeyStoreFromPEM(Path certificatePath, Path keyPath, char[] keyPassword) + public static KeyStore getKeyStoreFromPEM(Path certificatePath, Path keyPath, char[] keyPassword) throws IOException, CertificateException, KeyStoreException, NoSuchAlgorithmException { final PrivateKey key = PemUtils.readPrivateKey(keyPath, () -> keyPassword); 
final Certificate[] certificates = readCertificates(Collections.singletonList(certificatePath)); @@ -168,7 +168,7 @@ public class CertParsingUtils { /** * Returns a {@link X509ExtendedKeyManager} that is built from the provided keystore */ - static X509ExtendedKeyManager keyManager(KeyStore keyStore, char[] password, String algorithm) + public static X509ExtendedKeyManager keyManager(KeyStore keyStore, char[] password, String algorithm) throws NoSuchAlgorithmException, UnrecoverableKeyException, KeyStoreException { KeyManagerFactory kmf = KeyManagerFactory.getInstance(algorithm); kmf.init(keyStore, password); @@ -271,7 +271,7 @@ public class CertParsingUtils { /** * Creates a {@link X509ExtendedTrustManager} based on the trust material in the provided {@link KeyStore} */ - static X509ExtendedTrustManager trustManager(KeyStore keyStore, String algorithm) + public static X509ExtendedTrustManager trustManager(KeyStore keyStore, String algorithm) throws NoSuchAlgorithmException, KeyStoreException { TrustManagerFactory tmf = TrustManagerFactory.getInstance(algorithm); tmf.init(keyStore); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java index 84a643ae72d..89279f4ea31 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java @@ -76,7 +76,6 @@ public class IndexUpgradeAction extends Action { /** * Sets the index. 
*/ - @SuppressWarnings("unchecked") public final Request index(String index) { this.index = index; return this; diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/src/main/resources/monitoring-beats.json index ed027387a49..07756ba2602 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-beats.json @@ -37,6 +37,49 @@ }, "state": { "properties": { + "beat": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "host": { + "properties": { + "architecture": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "os": { + "properties": { + "build": { + "type": "keyword" + }, + "family": { + "type": "keyword" + }, + "platform": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } + } + } + }, + "input": { + "properties": { + "count": { + "type": "long" + }, + "names": { + "type": "keyword" + } + } + }, "module": { "properties": { "count": { @@ -46,6 +89,26 @@ "type": "keyword" } } + }, + "output": { + "properties": { + "name": { + "type": "keyword" + } + } + }, + "service": { + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } } } }, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java index 4be0cefe525..3e0d2d80651 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java @@ -37,7 +37,6 @@ public class MlRestTestStateCleaner { final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds"); datafeedsRequest.addParameter("filter_path", "datafeeds"); final Response datafeedsResponse = 
adminClient.performRequest(datafeedsRequest); - @SuppressWarnings("unchecked") final List> datafeeds = (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); if (datafeeds == null) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java index ef285b87cf1..fc2ee52dc41 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java @@ -147,7 +147,6 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase jobs = ESRestTestCase.entityAsMap(response); - @SuppressWarnings("unchecked") List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 3d1011c47e2..d5461d85599 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -635,11 +635,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu // TODO review these settings .settings(Settings.builder() .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1") - .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting) - // Sacrifice durability for performance: in the event of power - // failure we can lose the last 5 seconds of changes, but it's - // much faster - .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async")) + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)) .putMapping(ElasticsearchMappings.DOC_TYPE, 
Strings.toString(stateMapping)) .version(Version.CURRENT.id) .build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index ef87fe392dd..e33dbc69db6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -43,13 +43,13 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; -import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.CategoryDefinition; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; +import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.mockito.ArgumentCaptor; import java.io.IOException; @@ -252,7 +252,7 @@ public class JobProviderTests extends ESTestCase { BucketsQueryBuilder bq = new BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(1.0); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -286,7 +286,7 @@ public class JobProviderTests extends ESTestCase { BucketsQueryBuilder bq = new 
BucketsQueryBuilder().from(from).size(size).anomalyScoreThreshold(5.1) .includeInterim(true); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -322,7 +322,7 @@ public class JobProviderTests extends ESTestCase { bq.anomalyScoreThreshold(5.1); bq.includeInterim(true); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.buckets(jobId, bq, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage buckets = holder[0]; @@ -368,7 +368,7 @@ public class JobProviderTests extends ESTestCase { BucketsQueryBuilder bq = new BucketsQueryBuilder(); bq.timestamp(Long.toString(now.getTime())); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] bucketHolder = new QueryPage[1]; provider.buckets(jobId, bq, q -> bucketHolder[0] = q, e -> {}, client); assertThat(bucketHolder[0].count(), equalTo(1L)); @@ -409,7 +409,7 @@ public class JobProviderTests extends ESTestCase { .epochEnd(String.valueOf(now.getTime())).includeInterim(true).sortField(sortfield) .recordScore(2.2); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.records(jobId, rqb, page -> holder[0] = page, RuntimeException::new, client); QueryPage recordPage = holder[0]; @@ -462,7 +462,7 @@ public class JobProviderTests extends ESTestCase { rqb.sortField(sortfield); rqb.recordScore(2.2); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.records(jobId, rqb, page -> holder[0] = page, RuntimeException::new, client); QueryPage recordPage = holder[0]; @@ -507,7 +507,7 @@ public class JobProviderTests extends ESTestCase { Client client = 
getMockedClient(qb -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.bucketRecords(jobId, bucket, from, size, true, sortfield, true, page -> holder[0] = page, RuntimeException::new, client); @@ -568,7 +568,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(q -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.categoryDefinitions(jobId, null, false, from, size, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); @@ -590,7 +590,7 @@ public class JobProviderTests extends ESTestCase { SearchResponse response = createSearchResponse(Collections.singletonList(source)); Client client = getMockedClient(q -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.categoryDefinitions(jobId, categoryId, false, null, null, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); @@ -632,7 +632,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(q -> qbHolder[0] = q, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).includeInterim(false).build(); provider.influencers(jobId, query, page -> holder[0] = page, RuntimeException::new, client); @@ -692,7 +692,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(q -> qbHolder[0] = q, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) 
+ @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).start("0").end("0").sortField("sort") .sortDescending(true).influencerScoreThreshold(0.0).includeInterim(true).build(); @@ -747,7 +747,7 @@ public class JobProviderTests extends ESTestCase { Client client = getMockedClient(qb -> {}, response); JobProvider provider = createProvider(client); - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) QueryPage[] holder = new QueryPage[1]; provider.modelSnapshots(jobId, from, size, r -> holder[0] = r, RuntimeException::new); QueryPage page = holder[0]; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index f2c18ec9d5a..c31ebd4bc2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -195,7 +195,7 @@ public class JobResultsPersisterTests extends ESTestCase { verifyNoMoreInteractions(client); } - @SuppressWarnings({"unchecked", "rawtypes"}) + @SuppressWarnings({"unchecked"}) private Client mockClient(ArgumentCaptor captor) { Client client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java index eedc42148b1..5f8b685f844 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/ScoresUpdaterTests.java @@ -65,7 +65,6 @@ 
public class ScoresUpdaterTests extends ESTestCase { } @Before - @SuppressWarnings("unchecked") public void setUpMocks() throws IOException { MockitoAnnotations.initMocks(this); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index a77f6bf24e9..efc32fccb3d 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -35,11 +35,11 @@ import org.elasticsearch.threadpool.ThreadPoolStats; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackUsageRequestBuilder; import org.elasticsearch.xpack.core.action.XPackUsageResponse; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkRequestBuilder; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkResponse; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; -import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; -import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; import org.elasticsearch.xpack.monitoring.LocalStateMonitoring; import org.elasticsearch.xpack.monitoring.MonitoringService; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsMonitoringDoc; @@ -112,7 +112,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { * This test uses the Monitoring Bulk API to index document as an external application like Kibana would do. It * then ensure that the documents were correctly indexed and have the expected information. 
*/ - @SuppressWarnings("unchecked") public void testMonitoringBulk() throws Exception { whenExportersAreReady(() -> { final MonitoredSystem system = randomSystem(); @@ -188,7 +187,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { * have been indexed with the expected information. */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29880") - @SuppressWarnings("unchecked") public void testMonitoringService() throws Exception { final boolean createAPMIndex = randomBoolean(); final String indexName = createAPMIndex ? "apm-2017.11.06" : "books"; @@ -284,7 +282,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { * Asserts that the source_node information (provided as a Map) of a monitoring document correspond to * the current local node information */ - @SuppressWarnings("unchecked") private void assertMonitoringDocSourceNode(final Map sourceNode) { assertEquals(6, sourceNode.size()); @@ -541,7 +538,6 @@ public class MonitoringIT extends ESSingleNodeTestCase { /** * Disable the monitoring service and the Local exporter. 
*/ - @SuppressWarnings("unchecked") public void disableMonitoring() throws Exception { final Settings settings = Settings.builder() .putNull("xpack.monitoring.collection.enabled") diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 5bdfdf65014..854b0165954 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -22,8 +22,8 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile 'org.bouncycastle:bcprov-jdk15on:1.59' - compile 'org.bouncycastle:bcpkix-jdk15on:1.59' + compileOnly 'org.bouncycastle:bcprov-jdk15on:1.59' + compileOnly 'org.bouncycastle:bcpkix-jdk15on:1.59' // the following are all SAML dependencies - might as well download the whole internet compile "org.opensaml:opensaml-core:3.3.0" @@ -79,7 +79,6 @@ sourceSets.test.resources { srcDir '../core/src/test/resources' } dependencyLicenses { - mapping from: /bc.*/, to: 'bouncycastle' mapping from: /java-support|opensaml-.*/, to: 'shibboleth' mapping from: /http.*/, to: 'httpclient' } diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle new file mode 100644 index 00000000000..1799a2c7b81 --- /dev/null +++ b/x-pack/plugin/security/cli/build.gradle @@ -0,0 +1,20 @@ +apply plugin: 'elasticsearch.build' + +archivesBaseName = 'elasticsearch-security-cli' + +dependencies { + compileOnly "org.elasticsearch:elasticsearch:${version}" + compileOnly xpackProject('plugin:core') + compile 'org.bouncycastle:bcprov-jdk15on:1.59' + compile 'org.bouncycastle:bcpkix-jdk15on:1.59' + testImplementation 'com.google.jimfs:jimfs:1.1' + testCompile "junit:junit:${versions.junit}" + testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + testCompile 'org.elasticsearch:securemock:1.2' + testCompile "org.elasticsearch.test:framework:${version}" + testCompile project(path: xpackModule('core'), configuration: 
'testArtifacts') +} + +dependencyLicenses { + mapping from: /bc.*/, to: 'bouncycastle' +} \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 b/x-pack/plugin/security/cli/licenses/bcpkix-jdk15on-1.59.jar.sha1 similarity index 100% rename from x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 rename to x-pack/plugin/security/cli/licenses/bcpkix-jdk15on-1.59.jar.sha1 diff --git a/x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 b/x-pack/plugin/security/cli/licenses/bcprov-jdk15on-1.59.jar.sha1 similarity index 100% rename from x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 rename to x-pack/plugin/security/cli/licenses/bcprov-jdk15on-1.59.jar.sha1 diff --git a/x-pack/plugin/core/licenses/bouncycastle-LICENSE.txt b/x-pack/plugin/security/cli/licenses/bouncycastle-LICENSE.txt similarity index 100% rename from x-pack/plugin/core/licenses/bouncycastle-LICENSE.txt rename to x-pack/plugin/security/cli/licenses/bouncycastle-LICENSE.txt diff --git a/x-pack/plugin/core/licenses/bouncycastle-NOTICE.txt b/x-pack/plugin/security/cli/licenses/bouncycastle-NOTICE.txt similarity index 100% rename from x-pack/plugin/core/licenses/bouncycastle-NOTICE.txt rename to x-pack/plugin/security/cli/licenses/bouncycastle-NOTICE.txt diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertGenUtils.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java similarity index 93% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertGenUtils.java rename to x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java index 6273456aca2..0b88f3da40a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertGenUtils.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java @@ -3,7 +3,7 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import org.bouncycastle.asn1.ASN1Encodable; import org.bouncycastle.asn1.ASN1ObjectIdentifier; @@ -78,7 +78,7 @@ public class CertGenUtils { * Generates a CA certificate */ public static X509Certificate generateCACertificate(X500Principal x500Principal, KeyPair keyPair, int days) - throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { + throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { return generateSignedCertificate(x500Principal, null, keyPair, null, null, true, days, null); } @@ -100,7 +100,7 @@ public class CertGenUtils { */ public static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, X509Certificate caCert, PrivateKey caPrivKey, int days) - throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { + throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days, null); } @@ -125,7 +125,7 @@ public class CertGenUtils { public static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, X509Certificate caCert, PrivateKey caPrivKey, int days, String signatureAlgorithm) - throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { + throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException { return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days, signatureAlgorithm); } @@ -150,7 +150,7 @@ public class CertGenUtils { private static 
X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair, X509Certificate caCert, PrivateKey caPrivKey, boolean isCa, int days, String signatureAlgorithm) - throws NoSuchAlgorithmException, CertificateException, CertIOException, OperatorCreationException { + throws NoSuchAlgorithmException, CertificateException, CertIOException, OperatorCreationException { Objects.requireNonNull(keyPair, "Key-Pair must not be null"); final DateTime notBefore = new DateTime(DateTimeZone.UTC); if (days < 1) { @@ -175,8 +175,8 @@ public class CertGenUtils { } JcaX509v3CertificateBuilder builder = - new JcaX509v3CertificateBuilder(issuer, serial, - new Time(notBefore.toDate(), Locale.ROOT), new Time(notAfter.toDate(), Locale.ROOT), subject, keyPair.getPublic()); + new JcaX509v3CertificateBuilder(issuer, serial, + new Time(notBefore.toDate(), Locale.ROOT), new Time(notAfter.toDate(), Locale.ROOT), subject, keyPair.getPublic()); builder.addExtension(Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(keyPair.getPublic())); builder.addExtension(Extension.authorityKeyIdentifier, false, authorityKeyIdentifier); @@ -187,8 +187,8 @@ public class CertGenUtils { PrivateKey signingKey = caPrivKey != null ? caPrivKey : keyPair.getPrivate(); ContentSigner signer = new JcaContentSignerBuilder( - (Strings.isNullOrEmpty(signatureAlgorithm)) ? getDefaultSignatureAlgorithm(signingKey) : signatureAlgorithm) - .setProvider(CertGenUtils.BC_PROV).build(signingKey); + (Strings.isNullOrEmpty(signatureAlgorithm)) ? 
getDefaultSignatureAlgorithm(signingKey) : signatureAlgorithm) + .setProvider(CertGenUtils.BC_PROV).build(signingKey); X509CertificateHolder certificateHolder = builder.build(signer); return new JcaX509CertificateConverter().getCertificate(certificateHolder); } @@ -214,7 +214,7 @@ public class CertGenUtils { break; default: throw new IllegalArgumentException("Unsupported algorithm : " + key.getAlgorithm() - + " for signature, allowed values for private key algorithm are [RSA, DSA, EC]"); + + " for signature, allowed values for private key algorithm are [RSA, DSA, EC]"); } return signatureAlgorithm; } @@ -229,7 +229,7 @@ public class CertGenUtils { * @return a certificate signing request */ static PKCS10CertificationRequest generateCSR(KeyPair keyPair, X500Principal principal, GeneralNames sanList) - throws IOException, OperatorCreationException { + throws IOException, OperatorCreationException { Objects.requireNonNull(keyPair, "Key-Pair must not be null"); Objects.requireNonNull(keyPair.getPublic(), "Public-Key must not be null"); Objects.requireNonNull(principal, "Principal must not be null"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateTool.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateGenerateTool.java similarity index 90% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateTool.java rename to x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateGenerateTool.java index 5515978c3ca..809e4a6d305 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateTool.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateGenerateTool.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import joptsimple.ArgumentAcceptingOptionSpec; import joptsimple.OptionSet; @@ -34,6 +34,8 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import javax.security.auth.x500.X500Principal; @@ -68,6 +70,7 @@ import java.util.zip.ZipOutputStream; /** * CLI tool to make generation of certificates or certificate requests easier for users + * * @deprecated Replaced by {@link CertificateTool} */ @Deprecated @@ -81,7 +84,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { private static final int FILE_EXTENSION_LENGTH = 4; static final int MAX_FILENAME_LENGTH = 255 - FILE_EXTENSION_LENGTH; private static final Pattern ALLOWED_FILENAME_CHAR_PATTERN = - Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); + Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); private static final int DEFAULT_KEY_SIZE = 2048; private static final BouncyCastleProvider BC_PROV = new BouncyCastleProvider(); @@ -96,11 +99,11 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { // of the parser in this class so that we can defer initialization until after logging has been initialized static { @SuppressWarnings("unchecked") final ConstructingObjectParser instanceParser = - new ConstructingObjectParser<>( - "instances", - a -> new CertificateInformation( - (String) a[0], (String) (a[1] == null ? a[0] : a[1]), - (List) a[2], (List) a[3], (List) a[4])); + new ConstructingObjectParser<>( + "instances", + a -> new CertificateInformation( + (String) a[0], (String) (a[1] == null ? 
a[0] : a[1]), + (List) a[2], (List) a[3], (List) a[4])); instanceParser.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name")); instanceParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("filename")); instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("ip")); @@ -125,29 +128,29 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { CertificateGenerateTool() { super(DESCRIPTION); outputPathSpec = parser.accepts("out", "path of the zip file that the output should be written to") - .withRequiredArg(); + .withRequiredArg(); csrSpec = parser.accepts("csr", "only generate certificate signing requests"); caCertPathSpec = parser.accepts("cert", "path to an existing ca certificate").availableUnless(csrSpec).withRequiredArg(); caKeyPathSpec = parser.accepts("key", "path to an existing ca private key") - .availableIf(caCertPathSpec) - .requiredIf(caCertPathSpec) - .withRequiredArg(); + .availableIf(caCertPathSpec) + .requiredIf(caCertPathSpec) + .withRequiredArg(); caPasswordSpec = parser.accepts("pass", "password for an existing ca private key or the generated ca private key") - .availableUnless(csrSpec) - .withOptionalArg(); + .availableUnless(csrSpec) + .withOptionalArg(); caDnSpec = parser.accepts("dn", "distinguished name to use for the generated ca. 
defaults to " + AUTO_GEN_CA_DN) - .availableUnless(caCertPathSpec) - .availableUnless(csrSpec) - .withRequiredArg(); + .availableUnless(caCertPathSpec) + .availableUnless(csrSpec) + .withRequiredArg(); keysizeSpec = parser.accepts("keysize", "size in bits of RSA keys").withRequiredArg().ofType(Integer.class); inputFileSpec = parser.accepts("in", "file containing details of the instances in yaml format").withRequiredArg(); daysSpec = parser.accepts("days", "number of days that the generated certificates are valid") - .availableUnless(csrSpec) - .withRequiredArg() - .ofType(Integer.class); + .availableUnless(csrSpec) + .withRequiredArg() + .ofType(Integer.class); p12Spec = parser.accepts("p12", "output a p12 (PKCS#12) version for each certificate/key pair, with optional password") - .availableUnless(csrSpec) - .withOptionalArg(); + .availableUnless(csrSpec) + .withOptionalArg(); } public static void main(String[] args) throws Exception { @@ -178,7 +181,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { p12Password = null; } CAInfo caInfo = getCAInfo(terminal, dn, caCertPathSpec.value(options), caKeyPathSpec.value(options), keyPass, prompt, env, - keysize, days); + keysize, days); Collection certificateInformations = getCertificateInformationList(terminal, inputFile); generateAndWriteSignedCertificates(outputFile, certificateInformations, caInfo, keysize, days, p12Password); } @@ -197,7 +200,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Checks for output file in the user specified options or prompts the user for the output file * - * @param terminal terminal to communicate with a user + * @param terminal terminal to communicate with a user * @param outputPath user specified output file, may be {@code null} * @return a {@link Path} to the output file */ @@ -223,12 +226,13 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * This method handles the collection of information about 
each instance that is necessary to generate a certificate. The user may * be prompted or the information can be gathered from a file - * @param terminal the terminal to use for user interaction + * + * @param terminal the terminal to use for user interaction * @param inputFile an optional file that will be used to load the instance information * @return a {@link Collection} of {@link CertificateInformation} that represents each instance */ static Collection getCertificateInformationList(Terminal terminal, String inputFile) - throws Exception { + throws Exception { if (inputFile != null) { return parseAndValidateFile(terminal, resolvePath(inputFile).toAbsolutePath()); } @@ -239,7 +243,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { if (name.isEmpty() == false) { final boolean isNameValidFilename = Name.isValidFilename(name); String filename = terminal.readText("Enter name for directories and files " + (isNameValidFilename ? "[" + name + "]" : "") - + ": " ); + + ": "); if (filename.isEmpty() && isNameValidFilename) { filename = name; } @@ -267,7 +271,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { } String exit = terminal.readText("Would you like to specify another instance? 
Press 'y' to continue entering instance " + - "information: "); + "information: "); if ("y".equals(exit) == false) { done = true; } @@ -283,7 +287,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { if (errors.size() > 0) { hasError = true; terminal.println(Terminal.Verbosity.SILENT, "Configuration for instance " + certInfo.name.originalName - + " has invalid details"); + + " has invalid details"); for (String message : errors) { terminal.println(Terminal.Verbosity.SILENT, " * " + message); } @@ -298,6 +302,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Parses the input file to retrieve the certificate information + * * @param file the file to parse * @return a collection of certificate information */ @@ -305,22 +310,23 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { try (Reader reader = Files.newBufferedReader(file)) { // EMPTY is safe here because we never use namedObject XContentParser xContentParser = XContentType.YAML.xContent() - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, reader); + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, reader); return InputFileParser.PARSER.parse(xContentParser, new ArrayList<>(), null); } } /** * Generates certificate signing requests and writes them out to the specified file in zip format + * * @param outputFile the file to write the output to. 
This file must not already exist - * @param certInfo the details to use in the certificate signing requests + * @param certInfo the details to use in the certificate signing requests */ static void generateAndWriteCsrs(Path outputFile, Collection certInfo, int keysize) throws Exception { fullyWriteFile(outputFile, (outputStream, pemWriter) -> { for (CertificateInformation certificateInformation : certInfo) { KeyPair keyPair = CertGenUtils.generateKeyPair(keysize); GeneralNames sanList = getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, - certificateInformation.commonNames); + certificateInformation.commonNames); PKCS10CertificationRequest csr = CertGenUtils.generateCSR(keyPair, certificateInformation.name.x500Principal, sanList); final String dirName = certificateInformation.name.filename + "/"; @@ -347,15 +353,15 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { * Returns the CA certificate and private key that will be used to sign certificates. These may be specified by the user or * automatically generated * - * @param terminal the terminal to use for prompting the user - * @param dn the distinguished name to use for the CA + * @param terminal the terminal to use for prompting the user + * @param dn the distinguished name to use for the CA * @param caCertPath the path to the CA certificate or {@code null} if not provided - * @param caKeyPath the path to the CA private key or {@code null} if not provided - * @param prompt whether we should prompt the user for a password - * @param keyPass the password to the private key. 
If not present and the key is encrypted the user will be prompted - * @param env the environment for this tool to resolve files with - * @param keysize the size of the key in bits - * @param days the number of days that the certificate should be valid for + * @param caKeyPath the path to the CA private key or {@code null} if not provided + * @param prompt whether we should prompt the user for a password + * @param keyPass the password to the private key. If not present and the key is encrypted the user will be prompted + * @param env the environment for this tool to resolve files with + * @param keysize the size of the key in bits + * @param days the number of days that the certificate should be valid for * @return CA cert and private key */ static CAInfo getCAInfo(Terminal terminal, String dn, String caCertPath, String caKeyPath, char[] keyPass, boolean prompt, @@ -366,7 +372,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { Certificate[] certificates = CertParsingUtils.readCertificates(Collections.singletonList(resolvedCaCertPath), env); if (certificates.length != 1) { throw new IllegalArgumentException("expected a single certificate in file [" + caCertPath + "] but found [" + - certificates.length + "]"); + certificates.length + "]"); } Certificate caCert = certificates[0]; PrivateKey privateKey = readPrivateKey(caKeyPath, keyPass, terminal, prompt); @@ -388,11 +394,12 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Generates signed certificates in PEM format stored in a zip file - * @param outputFile the file that the certificates will be written to. This file must not exist + * + * @param outputFile the file that the certificates will be written to. 
This file must not exist * @param certificateInformations details for creation of the certificates - * @param caInfo the CA information to sign the certificates with - * @param keysize the size of the key in bits - * @param days the number of days that the certificate should be valid for + * @param caInfo the CA information to sign the certificates with + * @param keysize the size of the key in bits + * @param days the number of days that the certificate should be valid for */ static void generateAndWriteSignedCertificates(Path outputFile, Collection certificateInformations, CAInfo caInfo, int keysize, int days, char[] pkcs12Password) throws Exception { @@ -403,9 +410,9 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { for (CertificateInformation certificateInformation : certificateInformations) { KeyPair keyPair = CertGenUtils.generateKeyPair(keysize); Certificate certificate = CertGenUtils.generateSignedCertificate(certificateInformation.name.x500Principal, - getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, - certificateInformation.commonNames), - keyPair, caInfo.caCert, caInfo.privateKey, days); + getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, + certificateInformation.commonNames), + keyPair, caInfo.caCert, caInfo.privateKey, days); final String dirName = certificateInformation.name.filename + "/"; ZipEntry zipEntry = new ZipEntry(dirName); @@ -429,7 +436,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { final KeyStore pkcs12 = KeyStore.getInstance("PKCS12"); pkcs12.load(null); pkcs12.setKeyEntry(certificateInformation.name.originalName, keyPair.getPrivate(), pkcs12Password, - new Certificate[]{certificate}); + new Certificate[]{certificate}); outputStream.putNextEntry(new ZipEntry(entryBase + ".p12")); pkcs12.store(outputStream, pkcs12Password); @@ -441,7 +448,8 @@ public class CertificateGenerateTool 
extends EnvironmentAwareCommand { /** * This method handles the deletion of a file in the case of a partial write - * @param file the file that is being written to + * + * @param file the file that is being written to * @param writer writes the contents of the file */ private static void fullyWriteFile(Path file, Writer writer) throws Exception { @@ -468,9 +476,10 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * This method handles writing out the certificate authority cert and private key if the certificate authority was generated by * this invocation of the tool + * * @param outputStream the output stream to write to - * @param pemWriter the writer for PEM objects - * @param info the certificate authority information + * @param pemWriter the writer for PEM objects + * @param info the certificate authority information */ private static void writeCAInfoIfGenerated(ZipOutputStream outputStream, JcaPEMWriter pemWriter, CAInfo info) throws Exception { if (info.generated) { @@ -577,14 +586,15 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { /** * Helper method to read a private key and support prompting of user for a key. To avoid passwords being placed as an argument we * can prompt the user for their password if we encounter an encrypted key. 
- * @param path the path to the private key + * + * @param path the path to the private key * @param password the password provided by the user or {@code null} * @param terminal the terminal to use for user interaction - * @param prompt whether to prompt the user or not + * @param prompt whether to prompt the user or not * @return the {@link PrivateKey} that was read from the file */ private static PrivateKey readPrivateKey(String path, char[] password, Terminal terminal, boolean prompt) - throws Exception { + throws Exception { AtomicReference passwordReference = new AtomicReference<>(password); try { return PemUtils.readPrivateKey(resolvePath(path), () -> { @@ -682,7 +692,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { } } catch (IllegalArgumentException e) { String error = "[" + name + "] could not be converted to a valid DN\n" + e.getMessage() + "\n" - + ExceptionsHelper.stackTrace(e); + + ExceptionsHelper.stackTrace(e); return new Name(name, null, null, error); } @@ -695,15 +705,15 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand { static boolean isValidFilename(String name) { return ALLOWED_FILENAME_CHAR_PATTERN.matcher(name).matches() - && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() - && name.startsWith(".") == false; + && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() + && name.startsWith(".") == false; } @Override public String toString() { return getClass().getSimpleName() - + "{original=[" + originalName + "] principal=[" + x500Principal - + "] file=[" + filename + "] err=[" + error + "]}"; + + "{original=[" + originalName + "] principal=[" + x500Principal + + "] file=[" + filename + "] err=[" + error + "]}"; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateTool.java b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java similarity index 92% rename 
from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateTool.java rename to x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java index dd90df4dd6a..a966cac9109 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertificateTool.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertificateTool.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import joptsimple.OptionParser; import joptsimple.OptionSet; @@ -39,6 +39,8 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import javax.security.auth.x500.X500Principal; @@ -101,7 +103,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { private static final int FILE_EXTENSION_LENGTH = 4; static final int MAX_FILENAME_LENGTH = 255 - FILE_EXTENSION_LENGTH; private static final Pattern ALLOWED_FILENAME_CHAR_PATTERN = - Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); + Pattern.compile("[a-zA-Z0-9!@#$%^&{}\\[\\]()_+\\-=,.~'` ]{1," + MAX_FILENAME_LENGTH + "}"); private static final int DEFAULT_KEY_SIZE = 2048; /** @@ -115,11 +117,11 @@ public class CertificateTool extends LoggingAwareMultiCommand { // of the parser in this class so that we can defer initialization until after logging has been initialized static { @SuppressWarnings("unchecked") final ConstructingObjectParser instanceParser = - new ConstructingObjectParser<>( - "instances", - a -> new 
CertificateInformation( - (String) a[0], (String) (a[1] == null ? a[0] : a[1]), - (List) a[2], (List) a[3], (List) a[4])); + new ConstructingObjectParser<>( + "instances", + a -> new CertificateInformation( + (String) a[0], (String) (a[1] == null ? a[0] : a[1]), + (List) a[2], (List) a[3], (List) a[4])); instanceParser.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name")); instanceParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("filename")); instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("ip")); @@ -144,28 +146,28 @@ public class CertificateTool extends LoggingAwareMultiCommand { static final String INTRO_TEXT = "This tool assists you in the generation of X.509 certificates and certificate\n" + - "signing requests for use with SSL/TLS in the Elastic stack."; + "signing requests for use with SSL/TLS in the Elastic stack."; static final String INSTANCE_EXPLANATION = - " * An instance is any piece of the Elastic Stack that requires a SSL certificate.\n" + - " Depending on your configuration, Elasticsearch, Logstash, Kibana, and Beats\n" + - " may all require a certificate and private key.\n" + - " * The minimum required value for each instance is a name. This can simply be the\n" + - " hostname, which will be used as the Common Name of the certificate. A full\n" + - " distinguished name may also be used.\n" + - " * A filename value may be required for each instance. This is necessary when the\n" + - " name would result in an invalid file or directory name. The name provided here\n" + - " is used as the directory name (within the zip) and the prefix for the key and\n" + - " certificate files. The filename is required if you are prompted and the name\n" + - " is not displayed in the prompt.\n" + - " * IP addresses and DNS names are optional. Multiple values can be specified as a\n" + - " comma separated string. 
If no IP addresses or DNS names are provided, you may\n" + - " disable hostname verification in your SSL configuration."; + " * An instance is any piece of the Elastic Stack that requires a SSL certificate.\n" + + " Depending on your configuration, Elasticsearch, Logstash, Kibana, and Beats\n" + + " may all require a certificate and private key.\n" + + " * The minimum required value for each instance is a name. This can simply be the\n" + + " hostname, which will be used as the Common Name of the certificate. A full\n" + + " distinguished name may also be used.\n" + + " * A filename value may be required for each instance. This is necessary when the\n" + + " name would result in an invalid file or directory name. The name provided here\n" + + " is used as the directory name (within the zip) and the prefix for the key and\n" + + " certificate files. The filename is required if you are prompted and the name\n" + + " is not displayed in the prompt.\n" + + " * IP addresses and DNS names are optional. Multiple values can be specified as a\n" + + " comma separated string. 
If no IP addresses or DNS names are provided, you may\n" + + " disable hostname verification in your SSL configuration."; static final String CA_EXPLANATION = - " * All certificates generated by this tool will be signed by a certificate authority (CA).\n" + - " * The tool can automatically generate a new CA for you, or you can provide your own with the\n" + - " -ca or -ca-cert command line options."; + " * All certificates generated by this tool will be signed by a certificate authority (CA).\n" + + " * The tool can automatically generate a new CA for you, or you can provide your own with the\n" + + " -ca or -ca-cert command line options."; abstract static class CertificateCommand extends EnvironmentAwareCommand { @@ -202,32 +204,32 @@ public class CertificateTool extends LoggingAwareMultiCommand { final void acceptCertificateGenerationOptions() { pemFormatSpec = parser.accepts("pem", "output certificates and keys in PEM format instead of PKCS#12"); daysSpec = parser.accepts("days", "number of days that the generated certificates are valid") - .withRequiredArg().ofType(Integer.class); + .withRequiredArg().ofType(Integer.class); } final void acceptsCertificateAuthority() { caPkcs12PathSpec = parser.accepts("ca", "path to an existing ca key pair (in PKCS#12 format)").withRequiredArg(); caCertPathSpec = parser.accepts("ca-cert", "path to an existing ca certificate") - .availableUnless(caPkcs12PathSpec) - .withRequiredArg(); + .availableUnless(caPkcs12PathSpec) + .withRequiredArg(); caKeyPathSpec = parser.accepts("ca-key", "path to an existing ca private key") - .availableIf(caCertPathSpec) - .requiredIf(caCertPathSpec) - .withRequiredArg(); + .availableIf(caCertPathSpec) + .requiredIf(caCertPathSpec) + .withRequiredArg(); keepCaKeySpec = parser.accepts("keep-ca-key", "retain the CA private key for future use") - .availableUnless(caPkcs12PathSpec) - .availableUnless(caCertPathSpec); + .availableUnless(caPkcs12PathSpec) + .availableUnless(caCertPathSpec); caPasswordSpec 
= parser.accepts("ca-pass", "password for an existing ca private key or the generated ca private key") - .withOptionalArg(); + .withOptionalArg(); acceptsCertificateAuthorityName(); } void acceptsCertificateAuthorityName() { OptionSpecBuilder builder = parser.accepts("ca-dn", - "distinguished name to use for the generated ca. defaults to " + AUTO_GEN_CA_DN); + "distinguished name to use for the generated ca. defaults to " + AUTO_GEN_CA_DN); if (caPkcs12PathSpec != null) { builder = builder.availableUnless(caPkcs12PathSpec); } @@ -336,11 +338,11 @@ public class CertificateTool extends LoggingAwareMultiCommand { char[] passwordOption = getChars(caPasswordSpec.value(options)); Map keys = withPassword("CA (" + path + ")", passwordOption, - terminal, password -> CertParsingUtils.readPkcs12KeyPairs(path, password, a -> password)); + terminal, password -> CertParsingUtils.readPkcs12KeyPairs(path, password, a -> password)); if (keys.size() != 1) { throw new IllegalArgumentException("expected a single key in file [" + path.toAbsolutePath() + "] but found [" + - keys.size() + "]"); + keys.size() + "]"); } final Map.Entry pair = keys.entrySet().iterator().next(); return new CAInfo((X509Certificate) pair.getKey(), (PrivateKey) pair.getValue()); @@ -358,7 +360,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { Certificate[] certificates = CertParsingUtils.readCertificates(Collections.singletonList(resolvedCaCertPath), env); if (certificates.length != 1) { throw new IllegalArgumentException("expected a single certificate in file [" + resolvedCaCertPath + "] but found [" + - certificates.length + "]"); + certificates.length + "]"); } X509Certificate caCert = (X509Certificate) certificates[0]; PrivateKey privateKey = readPrivateKey(key, getChars(password), terminal); @@ -391,7 +393,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { * @return a {@link Collection} of {@link CertificateInformation} that represents each instance */ Collection 
getCertificateInformationList(Terminal terminal, OptionSet options) - throws Exception { + throws Exception { final Path input = resolvePath(options, inputFileSpec); if (input != null) { return parseAndValidateFile(terminal, input.toAbsolutePath()); @@ -456,7 +458,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { } String exit = terminal.readText("Would you like to specify another instance? Press 'y' to continue entering instance " + - "information: "); + "information: "); if ("y".equals(exit) == false) { done = true; } @@ -468,7 +470,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { final boolean isNameValidFilename = Name.isValidFilename(certName); while (true) { String filename = terminal.readText("Enter name for directories and files of " + certName + - (isNameValidFilename ? " [" + certName + "]" : "") + ": "); + (isNameValidFilename ? " [" + certName + "]" : "") + ": "); if (filename.isEmpty() && isNameValidFilename) { return certName; } @@ -490,7 +492,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { * @param includeKey if true, write the CA key in PEM format */ static void writeCAInfo(ZipOutputStream outputStream, JcaPEMWriter pemWriter, CAInfo info, boolean includeKey) - throws Exception { + throws Exception { final String caDirName = createCaDirectory(outputStream); outputStream.putNextEntry(new ZipEntry(caDirName + "ca.crt")); pemWriter.writeObject(info.certAndKey.cert); @@ -546,7 +548,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { pkcs12.load(null); withPassword(fileName, password, terminal, p12Password -> { if (isAscii(p12Password)) { - pkcs12.setKeyEntry(alias, pair.key, p12Password, new Certificate[] { pair.cert }); + pkcs12.setKeyEntry(alias, pair.key, p12Password, new Certificate[]{pair.cert}); if (caCert != null) { pkcs12.setCertificateEntry("ca", caCert); } @@ -574,7 +576,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { terminal.println("The 'csr' 
mode generates certificate signing requests that can be sent to"); terminal.println("a trusted certificate authority"); terminal.println(" * By default, this generates a single CSR for a single instance."); - terminal.println(" * You can use the '-multiple' option to generate CSRs for multiple" ); + terminal.println(" * You can use the '-multiple' option to generate CSRs for multiple"); terminal.println(" instances, each with their own private key."); terminal.println(" * The '-in' option allows for the CSR generation to be automated"); terminal.println(" by describing the details of each instance in a YAML file"); @@ -616,7 +618,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { for (CertificateInformation certificateInformation : certInfo) { KeyPair keyPair = CertGenUtils.generateKeyPair(keySize); GeneralNames sanList = getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, - certificateInformation.dnsNames, certificateInformation.commonNames); + certificateInformation.dnsNames, certificateInformation.commonNames); PKCS10CertificationRequest csr = CertGenUtils.generateCSR(keyPair, certificateInformation.name.x500Principal, sanList); final String dirName = certificateInformation.name.filename + "/"; @@ -750,7 +752,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { */ void generateAndWriteSignedCertificates(Path output, boolean writeZipFile, OptionSet options, Collection certs, CAInfo caInfo, Terminal terminal) - throws Exception { + throws Exception { checkDirectory(output, terminal); @@ -805,7 +807,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { final String fileName = entryBase + ".p12"; outputStream.putNextEntry(new ZipEntry(fileName)); writePkcs12(fileName, outputStream, certificateInformation.name.originalName, pair, caInfo.certAndKey.cert, - outputPassword, terminal); + outputPassword, terminal); outputStream.closeEntry(); } } @@ -815,7 +817,7 @@ public class CertificateTool extends 
LoggingAwareMultiCommand { CertificateInformation certificateInformation = certs.iterator().next(); CertificateAndKey pair = generateCertificateAndKey(certificateInformation, caInfo, keySize, days); fullyWriteFile(output, stream -> writePkcs12(output.getFileName().toString(), stream, - certificateInformation.name.originalName, pair, caInfo.certAndKey.cert, outputPassword, terminal)); + certificateInformation.name.originalName, pair, caInfo.certAndKey.cert, outputPassword, terminal)); } } @@ -823,9 +825,9 @@ public class CertificateTool extends LoggingAwareMultiCommand { int keySize, int days) throws Exception { KeyPair keyPair = CertGenUtils.generateKeyPair(keySize); Certificate certificate = CertGenUtils.generateSignedCertificate(certificateInformation.name.x500Principal, - getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, - certificateInformation.commonNames), - keyPair, caInfo.certAndKey.cert, caInfo.certAndKey.key, days); + getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, + certificateInformation.commonNames), + keyPair, caInfo.certAndKey.cert, caInfo.certAndKey.key, days); return new CertificateAndKey((X509Certificate) certificate, keyPair.getPrivate()); } @@ -872,7 +874,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { } else { final String fileName = output.getFileName().toString(); fullyWriteFile(output, outputStream -> - writePkcs12(fileName, outputStream, "ca", caInfo.certAndKey, null, caInfo.password, terminal)); + writePkcs12(fileName, outputStream, "ca", caInfo.certAndKey, null, caInfo.password, terminal)); } } } @@ -912,7 +914,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { try (Reader reader = Files.newBufferedReader(file)) { // EMPTY is safe here because we never use namedObject XContentParser xContentParser = XContentType.YAML.xContent() - .createParser(NamedXContentRegistry.EMPTY, 
LoggingDeprecationHandler.INSTANCE, reader); + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, reader); return CertificateToolParser.PARSER.parse(xContentParser, new ArrayList<>(), null); } } @@ -1015,7 +1017,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { * @return the {@link PrivateKey} that was read from the file */ private static PrivateKey readPrivateKey(Path path, char[] password, Terminal terminal) - throws Exception { + throws Exception { AtomicReference passwordReference = new AtomicReference<>(password); try { return PemUtils.readPrivateKey(path, () -> { @@ -1125,7 +1127,7 @@ public class CertificateTool extends LoggingAwareMultiCommand { } } catch (IllegalArgumentException e) { String error = "[" + name + "] could not be converted to a valid DN\n" + e.getMessage() + "\n" - + ExceptionsHelper.stackTrace(e); + + ExceptionsHelper.stackTrace(e); return new Name(name, null, null, error); } @@ -1138,15 +1140,15 @@ public class CertificateTool extends LoggingAwareMultiCommand { static boolean isValidFilename(String name) { return ALLOWED_FILENAME_CHAR_PATTERN.matcher(name).matches() - && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() - && name.startsWith(".") == false; + && ALLOWED_FILENAME_CHAR_PATTERN.matcher(resolvePath(name).toString()).matches() + && name.startsWith(".") == false; } @Override public String toString() { return getClass().getSimpleName() - + "{original=[" + originalName + "] principal=[" + x500Principal - + "] file=[" + filename + "] err=[" + error + "]}"; + + "{original=[" + originalName + "] principal=[" + x500Principal + + "] file=[" + filename + "] err=[" + error + "]}"; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java similarity index 97% rename from 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java rename to x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java index 20259144b42..bb1ed014b9c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertGenUtilsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import org.bouncycastle.asn1.x509.GeneralName; import org.bouncycastle.asn1.x509.GeneralNames; @@ -12,6 +12,7 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.cli.CertGenUtils; import org.junit.BeforeClass; import java.math.BigInteger; diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java similarity index 98% rename from x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java rename to x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java index dde0b7645df..91fd55933c5 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateGenerateToolTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; @@ -33,9 +33,11 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CAInfo; -import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CertificateInformation; -import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.Name; +import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.CAInfo; +import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.CertificateInformation; +import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.Name; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.hamcrest.Matchers; import org.junit.After; import org.junit.BeforeClass; @@ -359,8 +361,8 @@ public class CertificateGenerateToolTests extends ESTestCase { public void testGetCAInfo() throws Exception { Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt"); + Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem"); final boolean passwordPrompt = randomBoolean(); MockTerminal terminal = new MockTerminal(); if (passwordPrompt) { diff --git 
a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java similarity index 98% rename from x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java rename to x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java index 706d5dbab5f..9e970ea559a 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java +++ b/x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/CertificateToolTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.ssl; +package org.elasticsearch.xpack.security.cli; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; @@ -39,12 +39,14 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.TestMatchers; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CAInfo; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateAuthorityCommand; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateCommand; -import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateInformation; -import org.elasticsearch.xpack.core.ssl.CertificateTool.GenerateCertificateCommand; -import org.elasticsearch.xpack.core.ssl.CertificateTool.Name; +import org.elasticsearch.xpack.security.cli.CertificateTool.CAInfo; +import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateAuthorityCommand; +import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateCommand; +import 
org.elasticsearch.xpack.security.cli.CertificateTool.CertificateInformation; +import org.elasticsearch.xpack.security.cli.CertificateTool.GenerateCertificateCommand; +import org.elasticsearch.xpack.security.cli.CertificateTool.Name; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.hamcrest.Matchers; import org.junit.After; import org.junit.BeforeClass; @@ -387,8 +389,8 @@ public class CertificateToolTests extends ESTestCase { public void testGetCAInfo() throws Exception { Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); - Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt"); + Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem"); final boolean passwordPrompt = randomBoolean(); MockTerminal terminal = new MockTerminal(); if (passwordPrompt) { diff --git a/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt new file mode 100644 index 00000000000..08c160bcea5 --- /dev/null +++ b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID0zCCArugAwIBAgIJALi5bDfjMszLMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTUwOTIzMTg1MjU3WhcNMTkwOTIyMTg1MjU3 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC 
+AQ8AMIIBCgKCAQEA3rGZ1QbsW0+MuyrSLmMfDFKtLBkIFW8V0gRuurFg1PUKKNR1 +Mq2tMVwjjYETAU/UY0iKZOzjgvYPKhDTYBTte/WHR1ZK4CYVv7TQX/gtFQG/ge/c +7u0sLch9p7fbd+/HZiLS/rBEZDIohvgUvzvnA8+OIYnw4kuxKo/5iboAIS41klMg +/lATm8V71LMY68inht71/ZkQoAHKgcR9z4yNYvQ1WqKG8DG8KROXltll3sTrKbl5 +zJhn660es/1ZnR6nvwt6xnSTl/mNHMjkfv1bs4rJ/py3qPxicdoSIn/KyojUcgHV +F38fuAy2CQTdjVG5fWj9iz+mQvLm3+qsIYQdFwIDAQABo4G/MIG8MAkGA1UdEwQC +MAAwHQYDVR0OBBYEFEMMWLWQi/g83PzlHYqAVnty5L7HMIGPBgNVHREEgYcwgYSC +CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds +b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s +b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL +BQADggEBAMjGGXT8Nt1tbl2GkiKtmiuGE2Ej66YuZ37WSJViaRNDVHLlg87TCcHe +k2rdO+6sFqQbbzEfwQ05T7xGmVu7tm54HwKMRugoQ3wct0bQC5wEWYN+oMDvSyO6 +M28mZwWb4VtR2IRyWP+ve5DHwTM9mxWa6rBlGzsQqH6YkJpZojzqk/mQTug+Y8aE +mVoqRIPMHq9ob+S9qd5lp09+MtYpwPfTPx/NN+xMEooXWW/ARfpGhWPkg/FuCu4z +1tFmCqHgNcWirzMm3dQpF78muE9ng6OB2MXQwL4VgnVkxmlZNHbkR2v/t8MyZJxC +y4g6cTMM3S/UMt5/+aIB2JAuMKyuD+A= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem new file mode 100644 index 00000000000..5a67e103344 --- /dev/null +++ b/x-pack/plugin/security/cli/src/test/resources/org/elasticsearch/xpack/security/cli/testnode.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,9D867F7E0C94D013 + +dVoVCjPeg1wgS7rVtOvGfQcrZyLkx393aWRnFq45tbjKBVuITtJ9vI7o4QXOV/15 +Gnb6WhXGIdWrzsxEAd46K6hIuNSISd4Emsx6c2Q5hTqWXXfexbOZBNfTtXtdJPnJ +1jAaikhtztLo3JSLTKNY5sNxd+XbaQyYVUWvueK6zOaIIMETvB+VPVFd9i1ROibk +Sgdtyj01KjkoalifqK/tA0CIYNKL0S6/eoK3UhAlpIprlpV+cnXa940C6bjLeJPt +PMAGGp5RrplxSgrSerw3I9DOWkHGtpqzIka3XneNUXJP8k4HUJ+aZkGH2ZILKS8d +4KMIb+KZSpHEGn+6uGccWLtZZmAjWJrDw56JbQtSHdRYLBRSOjLbTvQoPu/2Hpli 
+7HOxbotlvjptMunncq5aqK57SHA1dh0cwF7J3LUmGFJ67eoz+VV3b5qMn4MopSeI +mS16Ydd3nGpjSrln/elM0CQxqWfcOAXRZpDpFUQoXcBrLVzvz2DBl/0CrTRLhgzi +CO+5/IVcBWRlYpRNGgjjP7q0j6URID3jk5J06fYQXmBiwQT5j+GZqqzpMCJ9mIy2 +1O9SN1hebJnIcEU+E0njn/MGjlYdPywhaCy8pqElp6Q8TUEJpwLRFO/owCoBet/n +ZmCXUjfCGhc1pWHufFcDEQ6xMgEWWY/tdwCZeSU7EhErTjCbfupg+55A5fpDml0m +3wH4CFcuRjlqyx6Ywixm1ATeitDtJl5HQTw6b8OtEXwSgRmZ0eSqSRVk9QbVS7gu +IpQe09/Zimb5HzjZqZ3fdqHlcW4xax8hyJeyIvF5ZJ57eY8CBvu/wP2GDn26QnvF +xQqdfDbq1H4JmpwUHpbFwBoQK4Q6WFd1z4EA9bRQeo3H9PoqoOwMDjzajwLRF7b7 +q6tYH/n9PyHwdf1c4fFwgSmL1toXGfKlA9hjIaLsRSDD6srT5EdUk78bsnddwI51 +tu7C7P4JG+h1VdRNMNTlqtileWsIE7Nn2A1OkcUxZdF5mamENpDpJcHePLto6c8q +FKiwyFMsxhgsj6HK2HqO+UA4sX5Ni4oHwiPmb//EZLn045M5i1AN26KosJmb8++D +sgR5reWRy+UqJCTYblVg+7Dx++ggUnfxVyQEsWmw5r5f4KU5wXBkvoVMGtPNa9DE +n/uLtObD1qkNL38pRsr2OGRchYCgEoKGqEISBP4knfGXLOlWiW/246j9QzI97r1u +tvy7fKg28G7AUz9l6bpewsPHefBUeRQeieP9eJINaEpxkF/w2RpKDLpQjWxwDDOM +s+D0mrBMJve17AmJ8rMw6dIQPZYNZ88/jz1uQuUwQ2YlbmtZbCG81k9YMFGEU9XS +cyhJxj8hvYnt2PR5Z9/cJPyWOs0m/ufOeeQQ8SnU/lzmrQnpzUd2Z6p5i/B7LdRP +n1kX+l1qynuPnjvBz4nJQE0p6nzW8RyCDSniC9mtYtZmhgC8icqxgbvS7uEOBIYJ +NbK+0bEETTO34iY/JVTIqLOw3iQZYMeUpxpj6Phgx/oooxMTquMecPKNgeVtaBst +qjTNPX0ti1/HYpZqzYi8SV8YjHSJWCVMsZjKPr3W/HIcCKqYoIfgzi83Ha2KMQx6 +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen index 4a192ac3b16..8e88e845e02 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen @@ -4,7 +4,8 @@ # or more contributor license agreements. Licensed under the Elastic License; # you may not use this file except in compliance with the Elastic License. 
-ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateGenerateTool \ +ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateGenerateTool \ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \ + ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \ "`dirname "$0"`"/elasticsearch-cli \ "$@" diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat index b5842b57b16..bb303f740e5 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certgen.bat @@ -7,8 +7,9 @@ rem you may not use this file except in compliance with the Elastic License. setlocal enabledelayedexpansion setlocal enableextensions -set ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateGenerateTool +set ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateGenerateTool set ES_ADDITIONAL_SOURCES=x-pack-env;x-pack-security-env +set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli call "%~dp0elasticsearch-cli.bat" ^ %%* ^ || exit /b 1 diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil index a13be812f0b..6d94344949b 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil @@ -4,7 +4,8 @@ # or more contributor license agreements. Licensed under the Elastic License; # you may not use this file except in compliance with the Elastic License. 
-ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateTool \ +ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateTool \ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \ + ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \ "`dirname "$0"`"/elasticsearch-cli \ "$@" diff --git a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat index 2e397190f23..34f595824f8 100644 --- a/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat +++ b/x-pack/plugin/security/src/main/bin/elasticsearch-certutil.bat @@ -7,8 +7,9 @@ rem you may not use this file except in compliance with the Elastic License. setlocal enabledelayedexpansion setlocal enableextensions -set ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateTool +set ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateTool set ES_ADDITIONAL_SOURCES=x-pack-env;x-pack-security-env +set ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli call "%~dp0elasticsearch-cli.bat" ^ %%* ^ || exit /b 1 diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 8bae951e883..3898e34b7a4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -541,7 +541,6 @@ public class AuthenticationService extends AbstractComponent { private final RestRequest request; - @SuppressWarnings("unchecked") AuditableRestRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext, RestRequest request) { super(auditTrail, failureHandler, threadContext); diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 77170f7a1cb..2247cbe02a8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -20,7 +20,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; @@ -200,6 +199,8 @@ class IndicesAndAliasesResolver { if (aliasesRequest.expandAliasesWildcards()) { List aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(), loadAuthorizedAliases(authorizedIndices.get(), metaData)); + //it may be that we replace aliases with an empty array, in case there are no authorized aliases for the action. + //MetaData#findAliases will return nothing when some alias was originally requested, which was replaced with empty. 
aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()])); } if (indicesReplacedWithNoIndices) { @@ -240,8 +241,7 @@ class IndicesAndAliasesResolver { } else { // the user is not authorized to put mappings for this index, but could have been // authorized for a write using an alias that triggered a dynamic mapping update - ImmutableOpenMap> foundAliases = - metaData.findAliases(Strings.EMPTY_ARRAY, new String[] { concreteIndexName }); + ImmutableOpenMap> foundAliases = metaData.findAllAliases(new String[] { concreteIndexName }); List aliasMetaData = foundAliases.get(concreteIndexName); if (aliasMetaData != null) { Optional foundAlias = aliasMetaData.stream() @@ -279,14 +279,12 @@ class IndicesAndAliasesResolver { List finalAliases = new ArrayList<>(); //IndicesAliasesRequest doesn't support empty aliases (validation fails) but GetAliasesRequest does (in which case empty means _all) - boolean matchAllAliases = aliases.length == 0; - if (matchAllAliases) { + if (aliases.length == 0) { finalAliases.addAll(authorizedAliases); } for (String aliasPattern : aliases) { if (aliasPattern.equals(MetaData.ALL)) { - matchAllAliases = true; finalAliases.addAll(authorizedAliases); } else if (Regex.isSimpleMatchPattern(aliasPattern)) { for (String authorizedAlias : authorizedAliases) { @@ -298,16 +296,6 @@ class IndicesAndAliasesResolver { finalAliases.add(aliasPattern); } } - - //Throw exception if the wildcards expansion to authorized aliases resulted in no indices. - //We always need to replace wildcards for security reasons, to make sure that the operation is executed on the aliases that we - //authorized it to execute on. Empty set gets converted to _all by es core though, and unlike with indices, here we don't have - //a special expression to replace empty set with, which gives us the guarantee that nothing will be returned. - //This is because existing aliases can contain all kinds of special characters, they are only validated since 5.1. 
- if (finalAliases.isEmpty()) { - String indexName = matchAllAliases ? MetaData.ALL : Arrays.toString(aliases); - throw new IndexNotFoundException(indexName); - } return finalAliases; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index c6cb8bb662c..036f1667e14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.test.SecurityIntegTestCase; import org.junit.Before; @@ -235,15 +236,19 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //ok: user has manage_aliases on test_* assertAcked(client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*").get()); - //fails: all aliases have been deleted, no existing aliases match test_alias_* - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, + { + //fails: all aliases have been deleted, no existing aliases match test_alias_* + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "test_alias_*")::get); - assertThat(indexNotFoundException.toString(), containsString("[test_alias_*]")); + assertThat(exception.getMessage(), equalTo("aliases [test_alias_*] missing")); + } - //fails: all aliases have been deleted, no existing aliases match _all - 
indexNotFoundException = expectThrows(IndexNotFoundException.class, + { + //fails: all aliases have been deleted, no existing aliases match _all + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, client().filterWithHeader(headers).admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThat(exception.getMessage(), equalTo("aliases [_all] missing")); + } //fails: user doesn't have manage_aliases on alias_1 assertThrowsAuthorizationException(client().filterWithHeader(headers).admin().indices().prepareAliases() @@ -383,24 +388,27 @@ public class IndexAliasesTests extends SecurityIntegTestCase { getAliasesResponse = client.admin().indices().prepareGetAliases().setAliases("test_alias").get(); assertEquals(0, getAliasesResponse.getAliases().size()); - //fails: no existing aliases to replace wildcards - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("test_*")::get); - assertThat(indexNotFoundException.toString(), containsString("[test_*]")); - - //fails: no existing aliases to replace _all - indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); - - //fails: no existing aliases to replace empty aliases - indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareGetAliases().setIndices("test_1")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); - - //fails: no existing aliases to replace empty aliases - indexNotFoundException = expectThrows(IndexNotFoundException.class, client.admin().indices().prepareGetAliases()::get); - assertThat(indexNotFoundException.toString(), 
containsString("[_all]")); + { + //fails: no existing aliases to replace wildcards + assertThrowsAuthorizationException( + client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("test_*")::get, + GetAliasesAction.NAME, "create_test_aliases_alias"); + } + { + //fails: no existing aliases to replace _all + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases().setIndices("test_1").setAliases("_all")::get, + GetAliasesAction.NAME, "create_test_aliases_alias"); + } + { + //fails: no existing aliases to replace empty aliases + assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases().setIndices("test_1")::get, + GetAliasesAction.NAME, "create_test_aliases_alias"); + } + { + //fails: no existing aliases to replace empty aliases + GetAliasesResponse response = client.admin().indices().prepareGetAliases().get(); + assertThat(response.getAliases().size(), equalTo(0)); + } } public void testCreateIndexThenAliasesCreateAndAliasesPermission3() { @@ -447,9 +455,9 @@ public class IndexAliasesTests extends SecurityIntegTestCase { assertAcked(client.admin().indices().prepareAliases().removeAlias("test_*", "_all")); //fails: all aliases have been deleted, _all can't be resolved to any existing authorized aliases - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, + AliasesNotFoundException exception = expectThrows(AliasesNotFoundException.class, client.admin().indices().prepareAliases().removeAlias("test_1", "_all")::get); - assertThat(indexNotFoundException.toString(), containsString("[_all]")); + assertThat(exception.getMessage(), equalTo("aliases [_all] missing")); } public void testGetAliasesCreateAndAliasesPermission3() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 
d7c974bdc6e..bd5acdec818 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -80,6 +80,7 @@ import java.util.Set; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -781,10 +782,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActionsNoAuthorizedIndices() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo*").alias("foo*")); - //no authorized aliases match bar*, hence this action fails and makes the whole request fail + //no authorized aliases match bar*, hence aliases are replaced with empty string for that action request.addAliasAction(AliasActions.remove().index("*bar").alias("bar*")); - expectThrows(IndexNotFoundException.class, () -> resolveIndices( - request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME))); + resolveIndices(request, buildAuthorizedIndices(user, IndicesAliasesAction.NAME)); + assertThat(request.getAliasActions().get(0).aliases().length, equalTo(1)); + assertThat(request.getAliasActions().get(1).aliases().length, equalTo(0)); } public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() { @@ -1086,12 +1088,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testResolveAliasesWildcardsGetAliasesRequestNoAuthorizedIndices() { GetAliasesRequest request = new GetAliasesRequest(); - //no 
authorized aliases match bar*, hence the request fails + //no authorized aliases match bar*, hence aliases are replaced with empty array request.aliases("bar*"); request.indices("*bar"); - IndexNotFoundException e = expectThrows(IndexNotFoundException.class, - () -> resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME))); - assertEquals("no such index", e.getMessage()); + resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME)); + assertThat(request.aliases().length, equalTo(0)); } public void testResolveAliasesAllGetAliasesRequestNoAuthorizedIndices() { @@ -1100,10 +1101,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.aliases("_all"); } request.indices("non_existing"); - //current user is not authorized for any index, foo* resolves to no indices, the request fails - IndexNotFoundException e = expectThrows(IndexNotFoundException.class, - () -> resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME))); - assertEquals("no such index", e.getMessage()); + //current user is not authorized for any index, foo* resolves to no indices, aliases are replaced with empty array + ResolvedIndices resolvedIndices = resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME)); + assertThat(resolvedIndices.getLocal(), contains("non_existing")); + assertThat(request.aliases().length, equalTo(0)); } /** diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java index c66ecbec2b3..67bfc2ecdcb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/RestAuthenticateActionTests.java @@ -58,7 +58,6 @@ public class 
RestAuthenticateActionTests extends SecurityIntegTestCase { assertThat(response.getStatusLine().getStatusCode(), is(200)); ObjectPath objectPath = ObjectPath.createFromResponse(response); assertThat(objectPath.evaluate("username").toString(), equalTo(SecuritySettingsSource.TEST_USER_NAME)); - @SuppressWarnings("unchecked") List roles = objectPath.evaluate("roles"); assertThat(roles.size(), is(1)); assertThat(roles, contains(SecuritySettingsSource.TEST_ROLE)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index feca093e581..70ab085fcf7 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -35,9 +35,6 @@ import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; -import javax.net.SocketFactory; -import javax.net.ssl.HandshakeCompletedListener; -import javax.net.ssl.SSLSocket; import java.io.IOException; import java.net.InetAddress; import java.net.SocketTimeoutException; @@ -47,6 +44,10 @@ import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import javax.net.SocketFactory; +import javax.net.ssl.HandshakeCompletedListener; +import javax.net.ssl.SSLSocket; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; @@ -119,7 +120,6 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTest @Override protected void closeConnectionChannel(Transport transport, 
Transport.Connection connection) throws IOException { - @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection; CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true); } diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index d8805d2e3db..f6b5177d508 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -24,7 +24,7 @@ dependencies { } compile xpackProject('plugin:sql:sql-proto') compile "org.apache.lucene:lucene-core:${versions.lucene}" - compile 'joda-time:joda-time:2.9.9' + compile 'joda-time:joda-time:2.10' runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" runtime "org.apache.logging.log4j:log4j-api:${versions.log4j}" runtime "org.apache.logging.log4j:log4j-core:${versions.log4j}" diff --git a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 new file mode 100644 index 00000000000..a597eabc654 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.10.jar.sha1 @@ -0,0 +1 @@ +f66c8125d1057ffce6c4e29e624cac863e110e2b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 deleted file mode 100644 index 4009932ea3b..00000000000 --- a/x-pack/plugin/sql/sql-action/licenses/joda-time-2.9.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b520c458572890807d143670c9b24f4de90897 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/build.gradle b/x-pack/plugin/sql/sql-proto/build.gradle index 5a1439f4360..7f26176e3c7 100644 --- a/x-pack/plugin/sql/sql-proto/build.gradle +++ b/x-pack/plugin/sql/sql-proto/build.gradle @@ -17,7 +17,7 @@ dependencies { compile (project(':libs:x-content')) { transitive = false } - compile 
'joda-time:joda-time:2.9.9' + compile 'joda-time:joda-time:2.10' runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" testCompile "org.elasticsearch.test:framework:${version}" diff --git a/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 new file mode 100644 index 00000000000..a597eabc654 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.10.jar.sha1 @@ -0,0 +1 @@ +f66c8125d1057ffce6c4e29e624cac863e110e2b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 deleted file mode 100644 index 4009932ea3b..00000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/joda-time-2.9.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b520c458572890807d143670c9b24f4de90897 \ No newline at end of file diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index f1d9eb1fb3f..f1495f4f3ac 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.Response; @@ -219,7 +220,6 @@ public class XPackRestIT extends ESClientYamlSuiteTestCase { @SuppressWarnings("unchecked") final Map node = (Map) nodes.values().iterator().next(); - @SuppressWarnings("unchecked") final Number activeWrites = (Number) extractValue("thread_pool.write.active", node); return activeWrites != null && activeWrites.longValue() == 0L; } catch (Exception e) { diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle 
b/x-pack/qa/core-rest-tests-with-security/build.gradle index 4a389414905..c27c9003355 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -12,10 +12,9 @@ integTest { integTestRunner { systemProperty 'tests.rest.blacklist', - ['cat.aliases/10_basic/Empty cluster', + [ 'index/10_with_id/Index with ID', - 'indices.get_alias/10_basic/Get alias against closed indices', - 'indices.get_alias/20_empty/Check empty aliases when getting all aliases via /_alias', + 'indices.get_alias/10_basic/Get alias against closed indices' ].join(',') systemProperty 'tests.rest.cluster.username', System.getProperty('tests.rest.cluster.username', 'test_user') diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index ec4e8824a19..7861557709e 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.authc.ldap; import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.ResultCode; + import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -302,7 +303,6 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryT } } - @SuppressWarnings("unchecked") @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29840") public void testHandlingLdapReferralErrors() throws Exception { String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com";