Merge branch 'master' into index-lifecycle

commit 5f696e15ea
@@ -222,7 +222,11 @@ class BuildPlugin implements Plugin<Project> {
            // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
            return Jvm.current().javaHome
        } else {
-           throw new GradleException("JAVA_HOME must be set to build Elasticsearch")
+           throw new GradleException(
+               "JAVA_HOME must be set to build Elasticsearch. " +
+               "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
+               "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
+           )
        }
    }
    return javaHome

@@ -40,7 +40,7 @@ import java.util.Map;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
+import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
 
 public class RankEvalIT extends ESRestHighLevelClientTestCase {
 
@@ -84,7 +84,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         Map<String, EvalQueryQuality> partialResults = response.getPartialResults();
         assertEquals(2, partialResults.size());
         EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query");
-        assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
+        assertEquals(2, filterUnratedDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
         List<RatedSearchHit> hitsAndRatings = amsterdamQueryQuality.getHitsAndRatings();
         assertEquals(7, hitsAndRatings.size());
         for (RatedSearchHit hit : hitsAndRatings) {
@@ -96,7 +96,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
             }
         }
         EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query");
-        assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size());
+        assertEquals(6, filterUnratedDocuments(berlinQueryQuality.getHitsAndRatings()).size());
         hitsAndRatings = berlinQueryQuality.getHitsAndRatings();
         assertEquals(7, hitsAndRatings.size());
         for (RatedSearchHit hit : hitsAndRatings) {

@@ -724,8 +724,8 @@ public class RestHighLevelClientTests extends ESTestCase {
                     assertEquals(0, method.getExceptionTypes().length);
                     assertEquals(3, method.getParameterTypes().length);
                     assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request"));
-                    assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName()));
-                    assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName()));
+                    assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class));
+                    assertThat(method.getParameterTypes()[2], equalTo(ActionListener.class));
                 } else {
                     //A few methods return a boolean rather than a response object
                     if (apiName.equals("ping") || apiName.contains("exist")) {
@@ -738,18 +738,23 @@ public class RestHighLevelClientTests extends ESTestCase {
                     //a few methods don't accept a request object as argument
                     if (apiName.equals("ping") || apiName.equals("info")) {
                         assertEquals(1, method.getParameterTypes().length);
-                        assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName()));
+                        assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class));
                     } else {
                         assertEquals(apiName, 2, method.getParameterTypes().length);
                         assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request"));
-                        assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName()));
+                        assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class));
                     }
 
                     boolean remove = apiSpec.remove(apiName);
-                    if (remove == false && deprecatedMethods.contains(apiName) == false) {
-                        //TODO xpack api are currently ignored, we need to load xpack yaml spec too
-                        if (apiName.startsWith("xpack.") == false) {
-                            apiNotFound.add(apiName);
+                    if (remove == false) {
+                        if (deprecatedMethods.contains(apiName)) {
+                            assertTrue("method [" + method.getName() + "], api [" + apiName + "] should be deprecated",
+                                method.isAnnotationPresent(Deprecated.class));
+                        } else {
+                            //TODO xpack api are currently ignored, we need to load xpack yaml spec too
+                            if (apiName.startsWith("xpack.") == false) {
+                                apiNotFound.add(apiName);
+                            }
                         }
                     }
                 }

@@ -21,6 +21,7 @@ package org.elasticsearch.transport.client;
 
 import io.netty.util.ThreadDeathWatcher;
 import io.netty.util.concurrent.GlobalEventExecutor;
 
 import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.network.NetworkModule;

@@ -49,7 +49,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os
  return copySpec {
    into("elasticsearch-${version}") {
      into('lib') {
-       with libFiles
+       with libFiles(oss)
      }
      into('config') {
        dirMode 0750

@@ -227,16 +227,24 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
  /*****************************************************************************
  *                    Common files in all distributions                       *
  *****************************************************************************/
- libFiles = copySpec {
-   // delay by using closures, since they have not yet been configured, so no jar task exists yet
-   from { project(':server').jar }
-   from { project(':server').configurations.runtime }
-   from { project(':libs:plugin-classloader').jar }
-   from { project(':distribution:tools:java-version-checker').jar }
-   from { project(':distribution:tools:launchers').jar }
-   into('tools/plugin-cli') {
-     from { project(':distribution:tools:plugin-cli').jar }
-     from { project(':distribution:tools:plugin-cli').configurations.runtime }
+ libFiles = { oss ->
+   copySpec {
+     // delay by using closures, since they have not yet been configured, so no jar task exists yet
+     from { project(':server').jar }
+     from { project(':server').configurations.runtime }
+     from { project(':libs:plugin-classloader').jar }
+     from { project(':distribution:tools:java-version-checker').jar }
+     from { project(':distribution:tools:launchers').jar }
+     into('tools/plugin-cli') {
+       from { project(':distribution:tools:plugin-cli').jar }
+       from { project(':distribution:tools:plugin-cli').configurations.runtime }
+     }
+     if (oss == false) {
+       into('tools/security-cli') {
+         from { project(':x-pack:plugin:security:cli').jar }
+         from { project(':x-pack:plugin:security:cli').configurations.compile }
+       }
+     }
    }
  }
 

@@ -126,7 +126,7 @@ Closure commonPackageConfig(String type, boolean oss) {
      }
      into('lib') {
        with copySpec {
-         with libFiles
+         with libFiles(oss)
          // we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
          eachFile { FileCopyDetails fcp ->
            String[] segments = fcp.relativePath.segments

@@ -26,14 +26,14 @@ include::install_remove.asciidoc[]
 | `field` | yes | - | The field to get the ip address from for the geographical lookup.
 | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database.
 | `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb, GeoLite2-Country.mmdb and GeoLite2-ASN.mmdb files.
-| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
+| `properties` | no | [`continent_name`, `country_iso_code`, `region_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
 | `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document
 |======
 
 *Depends on what is available in `database_field`:
 
 * If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`,
-`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
+`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
 and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`.
 * If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`,
 `country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties

@@ -1732,6 +1732,10 @@ For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED`
 | `include_keys` | no | `null` | List of keys to filter and insert into document. Defaults to including all keys
 | `exclude_keys` | no | `null` | List of keys to exclude from document
 | `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document
+| `prefix` | no | `null` | Prefix to be added to extracted keys
+| `trim_key` | no | `null` | String of characters to trim from extracted keys
+| `trim_value` | no | `null` | String of characters to trim from extracted values
+| `strip_brackets` | no | `false` | If `true` strip brackets `()`, `<>`, `[]` as well as quotes `'` and `"` from extracted values
 |======
 
 

@@ -74,7 +74,7 @@ field alias to query over multiple target fields in a single clause.
 ==== Unsupported APIs
 
 Writes to field aliases are not supported: attempting to use an alias in an index or update request
-will result in a failure. Likewise, aliases cannot be used as the target of `copy_to`.
+will result in a failure. Likewise, aliases cannot be used as the target of `copy_to` or in multi-fields.
 
 Because alias names are not present in the document source, aliases cannot be used when performing
 source filtering. For example, the following request will return an empty result for `_source`:

@@ -79,3 +79,11 @@ the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in
 ==== The deprecated stored script contexts have now been removed
 When putting stored scripts, support for storing them with the deprecated `template` context or without a context is
 now removed. Scripts must be stored using the `script` context as mentioned in the documentation.
+
+==== Get Aliases API limitations when {security} is enabled removed
+
+The behavior and response codes of the get aliases API no longer vary
+depending on whether {security} is enabled. Previously a
+404 - NOT FOUND (IndexNotFoundException) could be returned in case the
+current user was not authorized for any alias. An empty response with
+status 200 - OK is now returned instead at all times.

@@ -274,7 +274,7 @@ that shows potential errors of individual queries. The response has the followin
         "details": {
             "my_query_id1": { <2>
                 "quality_level": 0.6, <3>
-                "unknown_docs": [ <4>
+                "unrated_docs": [ <4>
                     {
                         "_index": "my_index",
                         "_id": "1960795"
@@ -309,7 +309,7 @@ that shows potential errors of individual queries. The response has the followin
 <1> the overall evaluation quality calculated by the defined metric
 <2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id
 <3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score
-<4> the `unknown_docs` section contains an `_index` and `_id` entry for each document in the search result for this
+<4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for this
 query that didn't have a ratings value. This can be used to ask the user to supply ratings for these documents
 <5> the `hits` section shows a grouping of the search results with their supplied rating
 <6> the `metric_details` give additional information about the calculated quality metric (e.g. how many of the retrieved

@@ -85,10 +85,6 @@ You can update this setting through the
 
 Sets the timeout for collecting index statistics. Defaults to `10s`.
 
-`xpack.monitoring.collection.indices.stats.timeout`::
-
-Sets the timeout for collecting total indices statistics. Defaults to `10s`.
-
 `xpack.monitoring.collection.index.recovery.active_only`::
 
 Controls whether or not all recoveries are collected. Set to `true` to

@@ -47,6 +47,8 @@ include::set-paths-tip.asciidoc[]
 Use the `elasticsearch-plugin` script to install the upgraded version of each
 installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
 a node.
++
+include::remove-xpack.asciidoc[]
 
 . *Start each upgraded node.*
 +
@@ -91,7 +93,7 @@ already have local shard copies.
 +
 --
 When all nodes have joined the cluster and recovered their primary shards,
 reenable allocation by restoring `cluster.routing.allocation.enable` to its
 default:
 
 [source,js]
@@ -123,4 +125,4 @@ GET _cat/recovery
 // CONSOLE
 --
 
 . *Restart machine learning jobs.*

@@ -0,0 +1,4 @@
+IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3,
+remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As
+of 6.3, {xpack} is included in the default distribution. The node will fail to
+start if the old {xpack} plugin is present.

@@ -53,6 +53,8 @@ include::set-paths-tip.asciidoc[]
 Use the `elasticsearch-plugin` script to install the upgraded version of each
 installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
 a node.
++
+include::remove-xpack.asciidoc[]
 
 . *Start the upgraded node.*
 +
@@ -144,7 +146,7 @@ for each node that needs to be updated.
 
 --
 
 . *Restart machine learning jobs.*
 
 [IMPORTANT]
 ====================================================

@@ -137,7 +137,6 @@ public class ChannelFactoryTests extends ESTestCase {
             super(rawChannelFactory);
         }
 
-        @SuppressWarnings("unchecked")
         @Override
         public NioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
             NioSocketChannel nioSocketChannel = new NioSocketChannel(channel);

@@ -120,7 +120,6 @@ public class EventHandlerTests extends ESTestCase {
         verify(channelFactory, times(2)).acceptNioChannel(same(serverContext), same(selectorSupplier));
     }
 
-    @SuppressWarnings("unchecked")
     public void testHandleAcceptCallsServerAcceptCallback() throws IOException {
         NioSocketChannel childChannel = new NioSocketChannel(mock(SocketChannel.class));
         SocketChannelContext childContext = mock(SocketChannelContext.class);

@@ -275,7 +275,6 @@ public class SocketChannelContextTests extends ESTestCase {
         }
     }
 
-    @SuppressWarnings("unchecked")
     public void testCloseClosesChannelBuffer() throws IOException {
         try (SocketChannel realChannel = SocketChannel.open()) {
             when(channel.getRawChannel()).thenReturn(realChannel);

@@ -43,7 +43,6 @@ class MultiPassStats {
         this.fieldBKey = fieldBName;
     }
 
-    @SuppressWarnings("unchecked")
     void computeStats(final List<Double> fieldA, final List<Double> fieldB) {
         // set count
         count = fieldA.size();

@@ -20,11 +20,17 @@
 esplugin {
   description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources'
   classname 'org.elasticsearch.ingest.common.IngestCommonPlugin'
+  extendedPlugins = ['lang-painless']
 }
 
 dependencies {
+  compileOnly project(':modules:lang-painless')
   compile project(':libs:grok')
 }
 
 compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
 compileTestJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
+
+integTestCluster {
+  module project(':modules:lang-painless')
+}
@@ -35,9 +35,13 @@ public final class BytesProcessor extends AbstractStringProcessor {
         super(processorTag, field, ignoreMissing, targetField);
     }
 
+    public static long apply(String value) {
+        return ByteSizeValue.parseBytesSizeValue(value, null, "Ingest Field").getBytes();
+    }
+
     @Override
     protected Long process(String value) {
-        return ByteSizeValue.parseBytesSizeValue(value, null, getField()).getBytes();
+        return apply(value);
     }
 
     @Override

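The change above follows a pattern repeated in the lowercase, uppercase, JSON and URL-decode processors later in this diff: the conversion is pulled out into a public static apply method so the same logic can be reused outside a pipeline, notably by the Painless whitelist added further down. A minimal sketch of calling the helper directly; the expected value assumes ByteSizeValue parses "1kb" as 1024 bytes:

import org.elasticsearch.ingest.common.BytesProcessor;

public class BytesApplyExample {
    public static void main(String[] args) {
        // BytesProcessor.apply delegates to ByteSizeValue.parseBytesSizeValue,
        // so a human-readable size becomes a plain byte count.
        long bytes = BytesProcessor.apply("1kb");
        System.out.println(bytes); // expected: 1024
    }
}
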
@@ -67,13 +67,11 @@ public final class JsonProcessor extends AbstractProcessor {
         return addToRoot;
     }
 
-    @Override
-    public void execute(IngestDocument document) throws Exception {
-        Object fieldValue = document.getFieldValue(field, Object.class);
-        BytesReference bytesRef = (fieldValue == null) ? new BytesArray("null") : new BytesArray(fieldValue.toString());
+    public static Object apply(Object fieldValue) {
+        BytesReference bytesRef = fieldValue == null ? new BytesArray("null") : new BytesArray(fieldValue.toString());
         try (InputStream stream = bytesRef.streamInput();
              XContentParser parser = JsonXContent.jsonXContent
                  .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) {
             XContentParser.Token token = parser.nextToken();
             Object value = null;
             if (token == XContentParser.Token.VALUE_NULL) {
@@ -91,20 +89,32 @@
             } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                 throw new IllegalArgumentException("cannot read binary value");
             }
-            if (addToRoot && (value instanceof Map)) {
-                for (Map.Entry<String, Object> entry : ((Map<String, Object>) value).entrySet()) {
-                    document.setFieldValue(entry.getKey(), entry.getValue());
-                }
-            } else if (addToRoot) {
-                throw new IllegalArgumentException("cannot add non-map fields to root of document");
-            } else {
-                document.setFieldValue(targetField, value);
-            }
+            return value;
         } catch (IOException e) {
             throw new IllegalArgumentException(e);
         }
     }
 
+    public static void apply(Map<String, Object> ctx, String fieldName) {
+        Object value = apply(ctx.get(fieldName));
+        if (value instanceof Map) {
+            @SuppressWarnings("unchecked")
+            Map<String, Object> map = (Map<String, Object>) value;
+            ctx.putAll(map);
+        } else {
+            throw new IllegalArgumentException("cannot add non-map fields to root of document");
+        }
+    }
+
+    @Override
+    public void execute(IngestDocument document) throws Exception {
+        if (addToRoot) {
+            apply(document.getSourceAndMetadata(), field);
+        } else {
+            document.setFieldValue(targetField, apply(document.getFieldValue(field, Object.class)));
+        }
+    }
+
     @Override
     public String getType() {
         return TYPE;

@@ -25,11 +25,14 @@ import org.elasticsearch.ingest.ConfigurationUtils;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.Processor;
 
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
 
 /**
  * The KeyValueProcessor parses and extracts messages of the `key=value` variety into fields with values of the keys.
@@ -38,6 +41,8 @@ public final class KeyValueProcessor extends AbstractProcessor {
 
     public static final String TYPE = "kv";
 
+    private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)");
+
     private final String field;
     private final String fieldSplit;
     private final String valueSplit;
@@ -45,9 +50,11 @@ public final class KeyValueProcessor extends AbstractProcessor {
     private final Set<String> excludeKeys;
     private final String targetField;
     private final boolean ignoreMissing;
+    private final Consumer<IngestDocument> execution;
 
     KeyValueProcessor(String tag, String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
-                      Set<String> excludeKeys, String targetField, boolean ignoreMissing) {
+                      Set<String> excludeKeys, String targetField, boolean ignoreMissing,
+                      String trimKey, String trimValue, boolean stripBrackets, String prefix) {
         super(tag);
         this.field = field;
         this.targetField = targetField;
@@ -56,6 +63,92 @@ public final class KeyValueProcessor extends AbstractProcessor {
         this.includeKeys = includeKeys;
         this.excludeKeys = excludeKeys;
         this.ignoreMissing = ignoreMissing;
+        this.execution = buildExecution(
+            fieldSplit, valueSplit, field, includeKeys, excludeKeys, targetField, ignoreMissing, trimKey, trimValue,
+            stripBrackets, prefix
+        );
+    }
+
+    private static Consumer<IngestDocument> buildExecution(String fieldSplit, String valueSplit, String field,
+                                                           Set<String> includeKeys, Set<String> excludeKeys,
+                                                           String targetField, boolean ignoreMissing,
+                                                           String trimKey, String trimValue, boolean stripBrackets,
+                                                           String prefix) {
+        final Predicate<String> keyFilter;
+        if (includeKeys == null) {
+            if (excludeKeys == null) {
+                keyFilter = key -> true;
+            } else {
+                keyFilter = key -> excludeKeys.contains(key) == false;
+            }
+        } else {
+            if (excludeKeys == null) {
+                keyFilter = includeKeys::contains;
+            } else {
+                keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false;
+            }
+        }
+        final String fieldPathPrefix;
+        String keyPrefix = prefix == null ? "" : prefix;
+        if (targetField == null) {
+            fieldPathPrefix = keyPrefix;
+        } else {
+            fieldPathPrefix = targetField + "." + keyPrefix;
+        }
+        final Function<String, String> keyPrefixer;
+        if (fieldPathPrefix.isEmpty()) {
+            keyPrefixer = val -> val;
+        } else {
+            keyPrefixer = val -> fieldPathPrefix + val;
+        }
+        final Function<String, String[]> fieldSplitter = buildSplitter(fieldSplit, true);
+        Function<String, String[]> valueSplitter = buildSplitter(valueSplit, false);
+        final Function<String, String> keyTrimmer = buildTrimmer(trimKey);
+        final Function<String, String> bracketStrip;
+        if (stripBrackets) {
+            bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll("");
+        } else {
+            bracketStrip = val -> val;
+        }
+        final Function<String, String> valueTrimmer = buildTrimmer(trimValue);
+        return document -> {
+            String value = document.getFieldValue(field, String.class, ignoreMissing);
+            if (value == null) {
+                if (ignoreMissing) {
+                    return;
+                }
+                throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
+            }
+            for (String part : fieldSplitter.apply(value)) {
+                String[] kv = valueSplitter.apply(part);
+                if (kv.length != 2) {
+                    throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
+                }
+                String key = keyTrimmer.apply(kv[0]);
+                if (keyFilter.test(key)) {
+                    append(document, keyPrefixer.apply(key), valueTrimmer.apply(bracketStrip.apply(kv[1])));
+                }
+            }
+        };
+    }
+
+    private static Function<String, String> buildTrimmer(String trim) {
+        if (trim == null) {
+            return val -> val;
+        } else {
+            Pattern pattern = Pattern.compile("(^([" + trim + "]+))|([" + trim + "]+$)");
+            return val -> pattern.matcher(val).replaceAll("");
+        }
+    }
+
+    private static Function<String, String[]> buildSplitter(String split, boolean fields) {
+        int limit = fields ? 0 : 2;
+        if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') {
+            Pattern splitPattern = Pattern.compile(split);
+            return val -> splitPattern.split(val, limit);
+        } else {
+            return val -> val.split(split, limit);
+        }
     }
 
     String getField() {
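The rewritten processor above decides its key filter, prefixing, trimming and splitting strategy once, at construction time, and stores the result as a composed Consumer instead of re-deriving them for every document in execute. A small self-contained sketch of that composition idea, using hypothetical names rather than the processor's actual fields:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;

public class KeyFilterSketch {
    public static void main(String[] args) {
        Set<String> includeKeys = new HashSet<>(Arrays.asList("first", "second"));
        Set<String> excludeKeys = new HashSet<>(Arrays.asList("second"));

        // Same shape as buildExecution(): build the predicate and prefixer once, reuse them per key.
        Predicate<String> keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false;
        Function<String, String> keyPrefixer = key -> "target." + key;

        for (String key : new String[] {"first", "second", "third"}) {
            if (keyFilter.test(key)) {
                System.out.println(keyPrefixer.apply(key)); // prints only: target.first
            }
        }
    }
}
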
@@ -86,7 +179,7 @@ public final class KeyValueProcessor extends AbstractProcessor {
         return ignoreMissing;
     }
 
-    public void append(IngestDocument document, String targetField, String value) {
+    private static void append(IngestDocument document, String targetField, String value) {
         if (document.hasField(targetField)) {
             document.appendFieldValue(targetField, value);
         } else {
@@ -96,27 +189,7 @@ public final class KeyValueProcessor extends AbstractProcessor {
 
     @Override
     public void execute(IngestDocument document) {
-        String oldVal = document.getFieldValue(field, String.class, ignoreMissing);
-
-        if (oldVal == null && ignoreMissing) {
-            return;
-        } else if (oldVal == null) {
-            throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
-        }
-
-        String fieldPathPrefix = (targetField == null) ? "" : targetField + ".";
-        Arrays.stream(oldVal.split(fieldSplit))
-            .map((f) -> {
-                String[] kv = f.split(valueSplit, 2);
-                if (kv.length != 2) {
-                    throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
-                }
-                return kv;
-            })
-            .filter((p) ->
-                (includeKeys == null || includeKeys.contains(p[0])) &&
-                (excludeKeys == null || excludeKeys.contains(p[0]) == false))
-            .forEach((p) -> append(document, fieldPathPrefix + p[0], p[1]));
+        execution.accept(document);
     }
 
     @Override
@@ -132,6 +205,11 @@ public final class KeyValueProcessor extends AbstractProcessor {
             String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field");
             String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split");
             String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split");
+            String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key");
+            String trimValue = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_value");
+            String prefix = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "prefix");
+            boolean stripBrackets =
+                ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false);
             Set<String> includeKeys = null;
             Set<String> excludeKeys = null;
             List<String> includeKeysList = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys");
@@ -143,7 +221,10 @@ public final class KeyValueProcessor extends AbstractProcessor {
                 excludeKeys = Collections.unmodifiableSet(Sets.newHashSet(excludeKeysList));
             }
             boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
-            return new KeyValueProcessor(processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing);
+            return new KeyValueProcessor(
+                processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing,
+                trimKey, trimValue, stripBrackets, prefix
+            );
         }
     }
 }

@@ -35,9 +35,13 @@ public final class LowercaseProcessor extends AbstractStringProcessor {
         super(processorTag, field, ignoreMissing, targetField);
     }
 
+    public static String apply(String value) {
+        return value.toLowerCase(Locale.ROOT);
+    }
+
     @Override
     protected String process(String value) {
-        return value.toLowerCase(Locale.ROOT);
+        return apply(value);
     }
 
     @Override

@@ -17,23 +17,33 @@
  * under the License.
  */
 
-package org.elasticsearch.index.settings;
-
-import org.elasticsearch.common.inject.BindingAnnotation;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Retention;
-import java.lang.annotation.Target;
-
-import static java.lang.annotation.ElementType.FIELD;
-import static java.lang.annotation.ElementType.PARAMETER;
-import static java.lang.annotation.RetentionPolicy.RUNTIME;
-
-@BindingAnnotation
-@Target({FIELD, PARAMETER})
-@Retention(RUNTIME)
-@Documented
-public @interface IndexDynamicSettings {
-
+package org.elasticsearch.ingest.common;
+
+import java.util.Map;
+
+public final class Processors {
+
+    public static long bytes(String value) {
+        return BytesProcessor.apply(value);
+    }
+
+    public static String lowercase(String value) {
+        return LowercaseProcessor.apply(value);
+    }
+
+    public static String uppercase(String value) {
+        return UppercaseProcessor.apply(value);
+    }
+
+    public static Object json(Object fieldValue) {
+        return JsonProcessor.apply(fieldValue);
+    }
+
+    public static void json(Map<String, Object> ctx, String field) {
+        JsonProcessor.apply(ctx, field);
+    }
+
+    public static String urlDecode(String value) {
+        return URLDecodeProcessor.apply(value);
+    }
 }
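Processors only delegates to the static apply helpers introduced earlier in this diff; it becomes callable from ingest scripts once the whitelist file below registers it with the Painless ingest context. A rough, hypothetical standalone illustration of the behaviour those delegating methods provide (not how Painless itself invokes them):

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.ingest.common.Processors;

public class ProcessorsExample {
    public static void main(String[] args) {
        System.out.println(Processors.bytes("1kb"));     // 1024
        System.out.println(Processors.lowercase("FOO")); // foo
        System.out.println(Processors.uppercase("bar")); // BAR

        // json(Map, String) parses the named field as JSON and merges the resulting
        // object into the map, mirroring the json processor's add_to_root behaviour.
        Map<String, Object> ctx = new HashMap<>();
        ctx.put("raw", "{\"a\":1}");
        Processors.json(ctx, "raw");
        System.out.println(ctx.get("a")); // 1
    }
}
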
@@ -0,0 +1,41 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.common;
+
+import org.elasticsearch.painless.spi.PainlessExtension;
+import org.elasticsearch.painless.spi.Whitelist;
+import org.elasticsearch.painless.spi.WhitelistLoader;
+import org.elasticsearch.script.IngestScript;
+import org.elasticsearch.script.ScriptContext;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+public class ProcessorsWhitelistExtension implements PainlessExtension {
+
+    private static final Whitelist WHITELIST =
+        WhitelistLoader.loadFromResourceFiles(ProcessorsWhitelistExtension.class, "processors_whitelist.txt");
+
+    @Override
+    public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
+        return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST));
+    }
+}
@@ -34,15 +34,19 @@ public final class URLDecodeProcessor extends AbstractStringProcessor {
         super(processorTag, field, ignoreMissing, targetField);
     }
 
-    @Override
-    protected String process(String value) {
+    public static String apply(String value) {
         try {
             return URLDecoder.decode(value, "UTF-8");
         } catch (UnsupportedEncodingException e) {
-            throw new IllegalArgumentException("could not URL-decode field[" + getField() + "]", e);
+            throw new IllegalArgumentException("Could not URL-decode value.", e);
         }
     }
 
+    @Override
+    protected String process(String value) {
+        return apply(value);
+    }
+
     @Override
     public String getType() {
         return TYPE;

@@ -34,9 +34,13 @@ public final class UppercaseProcessor extends AbstractStringProcessor {
         super(processorTag, field, ignoreMissing, targetField);
     }
 
+    public static String apply(String value) {
+        return value.toUpperCase(Locale.ROOT);
+    }
+
     @Override
     protected String process(String value) {
-        return value.toUpperCase(Locale.ROOT);
+        return apply(value);
     }
 
     @Override

@@ -0,0 +1 @@
+org.elasticsearch.ingest.common.ProcessorsWhitelistExtension
@@ -0,0 +1,29 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# This file contains a whitelist of static processor methods that can be accessed from painless
+
+class org.elasticsearch.ingest.common.Processors {
+    long bytes(String)
+    String lowercase(String)
+    String uppercase(String)
+    Object json(Object)
+    void json(Map, String)
+    String urlDecode(String)
+}
@@ -63,7 +63,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase {
         Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
         ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument));
         assertThat(exception.getMessage(),
-            CoreMatchers.equalTo("failed to parse setting [" + fieldName + "] with value [8912pb] as a size in bytes"));
+            CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes"));
         assertThat(exception.getCause().getMessage(),
             CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported"));
     }
@@ -93,6 +93,6 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase {
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L));
         assertWarnings("Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " +
-            "[" + fieldName + "]");
+            "[Ingest Field]");
     }
 }

@@ -146,7 +146,6 @@ public class JsonProcessorTests extends ESTestCase {
         assertThat(exception.getMessage(), equalTo("field [field] not present as part of path [field]"));
     }
 
-    @SuppressWarnings("unchecked")
     public void testAddToRoot() throws Exception {
         String processorTag = randomAlphaOfLength(3);
         String randomTargetField = randomAlphaOfLength(2);

@@ -25,19 +25,25 @@ import org.elasticsearch.ingest.Processor;
 import org.elasticsearch.ingest.RandomDocumentPicks;
 import org.elasticsearch.test.ESTestCase;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
 import static org.hamcrest.Matchers.equalTo;
 
 public class KeyValueProcessorTests extends ESTestCase {
 
+    private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory();
+
     public void test() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
         String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
-        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false);
+        Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
         assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
@@ -46,7 +52,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testRootTarget() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
         ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe");
-        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "myField", "&", "=", null, null,null, false);
+        Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello"));
         assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe")));
@@ -55,7 +61,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testKeySameAsSourceField() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
         ingestDocument.setFieldValue("first", "first=hello");
-        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "first", "&", "=", null, null,null, false);
+        Processor processor = createKvProcessor("first", "&", "=", null, null,null, false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello")));
     }
@@ -63,7 +69,7 @@ public class KeyValueProcessorTests extends ESTestCase {
     public void testIncludeKeys() throws Exception {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
         String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
-        Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
+        Processor processor = createKvProcessor(fieldName, "&", "=",
             Sets.newHashSet("first"), null, "target", false);
         processor.execute(ingestDocument);
         assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
@ -73,7 +79,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
public void testExcludeKeys() throws Exception {
|
public void testExcludeKeys() throws Exception {
|
||||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
|
Processor processor = createKvProcessor(fieldName, "&", "=",
|
||||||
null, Sets.newHashSet("second"), "target", false);
|
null, Sets.newHashSet("second"), "target", false);
|
||||||
processor.execute(ingestDocument);
|
processor.execute(ingestDocument);
|
||||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||||
|
@ -84,7 +90,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
|
||||||
"first=hello&second=world&second=universe&third=bar");
|
"first=hello&second=world&second=universe&third=bar");
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
|
Processor processor = createKvProcessor(fieldName, "&", "=",
|
||||||
Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false);
|
Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false);
|
||||||
processor.execute(ingestDocument);
|
processor.execute(ingestDocument);
|
||||||
assertFalse(ingestDocument.hasField("target.first"));
|
assertFalse(ingestDocument.hasField("target.first"));
|
||||||
|
@ -92,9 +98,9 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
assertFalse(ingestDocument.hasField("target.third"));
|
assertFalse(ingestDocument.hasField("target.third"));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMissingField() {
|
public void testMissingField() throws Exception {
|
||||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "&",
|
Processor processor = createKvProcessor("unknown", "&",
|
||||||
"=", null, null, "target", false);
|
"=", null, null, "target", false);
|
||||||
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
||||||
assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
|
assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
|
||||||
|
@ -105,7 +111,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
|
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
|
||||||
Collections.singletonMap(fieldName, null));
|
Collections.singletonMap(fieldName, null));
|
||||||
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "", "", null, null, "target", true);
|
Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true);
|
||||||
processor.execute(ingestDocument);
|
processor.execute(ingestDocument);
|
||||||
assertIngestDocument(originalIngestDocument, ingestDocument);
|
assertIngestDocument(originalIngestDocument, ingestDocument);
|
||||||
}
|
}
|
||||||
|
@ -113,7 +119,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
public void testNonExistentWithIgnoreMissing() throws Exception {
|
public void testNonExistentWithIgnoreMissing() throws Exception {
|
||||||
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
||||||
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "", "", null, null, "target", true);
|
Processor processor = createKvProcessor("unknown", "", "", null, null, "target", true);
|
||||||
processor.execute(ingestDocument);
|
processor.execute(ingestDocument);
|
||||||
assertIngestDocument(originalIngestDocument, ingestDocument);
|
assertIngestDocument(originalIngestDocument, ingestDocument);
|
||||||
}
|
}
|
||||||
|
@ -121,7 +127,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
public void testFailFieldSplitMatch() throws Exception {
|
public void testFailFieldSplitMatch() throws Exception {
|
||||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe");
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe");
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false);
|
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
|
||||||
processor.execute(ingestDocument);
|
processor.execute(ingestDocument);
|
||||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe"));
|
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe"));
|
||||||
assertFalse(ingestDocument.hasField("target.second"));
|
assertFalse(ingestDocument.hasField("target.second"));
|
||||||
|
@ -129,8 +135,94 @@ public class KeyValueProcessorTests extends ESTestCase {
|
||||||
|
|
||||||
public void testFailValueSplitMatch() throws Exception {
|
public void testFailValueSplitMatch() throws Exception {
|
||||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar"));
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar"));
|
||||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "foo", "&", "=", null, null, "target", false);
|
Processor processor = createKvProcessor("foo", "&", "=", null, null, "target", false);
|
||||||
Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
||||||
assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]"));
|
assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testTrimKeyAndValue() throws Exception {
|
||||||
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first= hello &second=world& second =universe");
|
||||||
|
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, " ", " ", false, null);
|
||||||
|
processor.execute(ingestDocument);
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testTrimMultiCharSequence() throws Exception {
|
||||||
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
|
||||||
|
"to=<foo@example.com>, orig_to=<bar@example.com>, %+relay=mail.example.com[private/dovecot-lmtp]," +
|
||||||
|
" delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent "
|
||||||
|
);
|
||||||
|
Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null);
|
||||||
|
processor.execute(ingestDocument);
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.to", String.class), equalTo("foo@example.com"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.orig_to", String.class), equalTo("bar@example.com"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.relay", String.class), equalTo("mail.example.com[private/dovecot-lmtp]"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.delay", String.class), equalTo("2.2"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.delays", String.class), equalTo("1.9/0.01/0.01/0.21"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.dsn", String.class), equalTo("2.0.0"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.status", String.class), equalTo("sent"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testStripBrackets() throws Exception {
|
||||||
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
|
String fieldName = RandomDocumentPicks.addRandomField(
|
||||||
|
random(), ingestDocument, "first=<hello>&second=\"world\"&second=(universe)&third=<foo>&fourth=[bar]&fifth='last'"
|
||||||
|
);
|
||||||
|
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null);
|
||||||
|
processor.execute(ingestDocument);
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.third", String.class), equalTo("foo"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.fourth", String.class), equalTo("bar"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.fifth", String.class), equalTo("last"));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testAddPrefix() throws Exception {
|
||||||
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||||
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
||||||
|
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, false, "arg_");
|
||||||
|
processor.execute(ingestDocument);
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.arg_first", String.class), equalTo("hello"));
|
||||||
|
assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
|
||||||
|
Set<String> excludeKeys, String targetField,
|
||||||
|
boolean ignoreMissing) throws Exception {
|
||||||
|
return createKvProcessor(
|
||||||
|
field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
|
||||||
|
Set<String> excludeKeys, String targetField, boolean ignoreMissing,
|
||||||
|
String trimKey, String trimValue, boolean stripBrackets,
|
||||||
|
String prefix) throws Exception {
|
||||||
|
Map<String, Object> config = new HashMap<>();
|
||||||
|
config.put("field", field);
|
||||||
|
config.put("field_split", fieldSplit);
|
||||||
|
config.put("value_split", valueSplit);
|
||||||
|
config.put("target_field", targetField);
|
||||||
|
if (includeKeys != null) {
|
||||||
|
config.put("include_keys", new ArrayList<>(includeKeys));
|
||||||
|
}
|
||||||
|
if (excludeKeys != null) {
|
||||||
|
config.put("exclude_keys", new ArrayList<>(excludeKeys));
|
||||||
|
}
|
||||||
|
config.put("ignore_missing", ignoreMissing);
|
||||||
|
if (trimKey != null) {
|
||||||
|
config.put("trim_key", trimKey);
|
||||||
|
}
|
||||||
|
if (trimValue != null) {
|
||||||
|
config.put("trim_value", trimValue);
|
||||||
|
}
|
||||||
|
config.put("strip_brackets", stripBrackets);
|
||||||
|
if (prefix != null) {
|
||||||
|
config.put("prefix", prefix);
|
||||||
|
}
|
||||||
|
return FACTORY.create(null, randomAlphaOfLength(10), config);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
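The refactored tests above no longer call the KeyValueProcessor constructor directly; they build the processor through its factory from a config map, so every option they exercise maps one-to-one onto a pipeline configuration key (field_split, value_split, include_keys, exclude_keys, target_field, ignore_missing, trim_key, trim_value, strip_brackets, prefix). A minimal pipeline sketch that wires the new options together, assuming the processor is registered under its usual "kv" type; the pipeline id and field names are illustrative and not part of this change:

    PUT _ingest/pipeline/kv_example
    {
      "description": "sketch: exercises the kv options introduced by this change",
      "processors": [
        {
          "kv": {
            "field": "message",
            "field_split": "&",
            "value_split": "=",
            "target_field": "parsed",
            "trim_key": " ",
            "trim_value": " ",
            "strip_brackets": true,
            "prefix": "arg_",
            "ignore_missing": false
          }
        }
      ]
    }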
@@ -0,0 +1,216 @@
+---
+teardown:
+  - do:
+      ingest.delete_pipeline:
+        id: "my_pipeline"
+        ignore: 404
+
+---
+"Test invoke bytes processor":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "script" : {
+                  "lang": "painless",
+                  "source" : "ctx.target_field = Processors.bytes(ctx.source_field)"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {source_field: "1kb"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.source_field: "1kb" }
+  - match: { _source.target_field: 1024 }
+
+---
+"Test invoke lowercase processor":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "script" : {
+                  "lang": "painless",
+                  "source" : "ctx.target_field = Processors.lowercase(ctx.source_field)"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {source_field: "FooBar"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.source_field: "FooBar" }
+  - match: { _source.target_field: "foobar" }
+
+---
+"Test invoke uppercase processor":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "script" : {
+                  "lang": "painless",
+                  "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {source_field: "FooBar"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.source_field: "FooBar" }
+  - match: { _source.target_field: "FOOBAR" }
+
+---
+"Test invoke json processor, assign to field":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "script" : {
+                  "lang": "painless",
+                  "source" : "ctx.target_field = Processors.json(ctx.source_field)"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {source_field: "{\"foo\":\"bar\"}"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.source_field: "{\"foo\":\"bar\"}" }
+  - match: { _source.target_field.foo: "bar" }
+
+---
+"Test invoke json processor, assign to root":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "script" : {
+                  "lang": "painless",
+                  "source" : "Processors.json(ctx, 'source_field')"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {source_field: "{\"foo\":\"bar\"}"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.source_field: "{\"foo\":\"bar\"}" }
+  - match: { _source.foo: "bar" }
+
+---
+"Test invoke urlDecode processor":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "script" : {
+                  "lang": "painless",
+                  "source" : "ctx.target_field = Processors.urlDecode(ctx.source_field)"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        pipeline: "my_pipeline"
+        body: {source_field: "foo%20bar"}
+
+  - do:
+      get:
+        index: test
+        type: test
+        id: 1
+  - match: { _source.source_field: "foo%20bar" }
+  - match: { _source.target_field: "foo bar" }
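Each REST test above calls exactly one whitelisted Processors method from a Painless script processor. Since these are ordinary script statements, several calls can be combined in a single processor; a hedged sketch of such a pipeline body (the field names are illustrative, not part of the change):

    {
      "description": "sketch: several whitelisted Processors calls in one script",
      "processors": [
        {
          "script": {
            "lang": "painless",
            "source": "ctx.size_in_bytes = Processors.bytes(ctx.size); ctx.user = Processors.lowercase(ctx.user); ctx.url = Processors.urlDecode(ctx.url)"
          }
        }
      ]
    }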
@@ -174,4 +174,4 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Term {
 class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc {
   int getLength()
   float getFreq()
 }
@@ -26,7 +26,7 @@ import java.util.Map;

 public class InitializerTests extends ScriptTestCase {

-    @SuppressWarnings({"unchecked", "rawtypes"})
+    @SuppressWarnings({"rawtypes"})
     public void testArrayInitializers() {
         int[] ints = (int[])exec("new int[] {}");

@@ -59,7 +59,7 @@ public class InitializerTests extends ScriptTestCase {
         assertEquals("aaaaaa", objects[3]);
     }

-    @SuppressWarnings({"unchecked", "rawtypes"})
+    @SuppressWarnings({"rawtypes"})
     public void testListInitializers() {
         List list = (List)exec("[]");

@@ -91,7 +91,7 @@ public class InitializerTests extends ScriptTestCase {
         assertEquals("aaaaaa", list.get(3));
     }

-    @SuppressWarnings({"unchecked", "rawtypes"})
+    @SuppressWarnings({"rawtypes"})
     public void testMapInitializers() {
         Map map = (Map)exec("[:]");

@@ -1,7 +1,5 @@
 {
-  "index_patterns": [
-    "filebeat-6.0.0-*"
-  ],
+  "index_patterns": ["filebeat-6.0.0-*"],
   "mappings": {
     "doc": {
       "_meta": {
@@ -67,12 +65,14 @@
             "type": "keyword"
           },
           "country_iso_code": {
-            "ignore_above": 1024,
             "type": "keyword"
           },
           "location": {
             "type": "geo_point"
           },
+          "region_iso_code": {
+            "type": "keyword"
+          },
           "region_name": {
             "ignore_above": 1024,
             "type": "keyword"
@@ -102,8 +102,8 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(queryId);
         builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult);
-        builder.startArray(UNKNOWN_DOCS_FIELD.getPreferredName());
-        for (DocumentKey key : EvaluationMetric.filterUnknownDocuments(ratedHits)) {
+        builder.startArray(UNRATED_DOCS_FIELD.getPreferredName());
+        for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) {
             builder.startObject();
             builder.field(RatedDocument.INDEX_FIELD.getPreferredName(), key.getIndex());
             builder.field(RatedDocument.DOC_ID_FIELD.getPreferredName(), key.getDocId());
@@ -123,7 +123,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
     }

     private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level");
-    private static final ParseField UNKNOWN_DOCS_FIELD = new ParseField("unknown_docs");
+    private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs");
     private static final ParseField HITS_FIELD = new ParseField("hits");
     private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details");
     private static final ObjectParser<ParsedEvalQueryQuality, Void> PARSER = new ObjectParser<>("eval_query_quality",
@@ -76,10 +76,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
    /**
     * filter @link {@link RatedSearchHit} that don't have a rating
     */
-    static List<DocumentKey> filterUnknownDocuments(List<RatedSearchHit> ratedHits) {
-        List<DocumentKey> unknownDocs = ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)
+    static List<DocumentKey> filterUnratedDocuments(List<RatedSearchHit> ratedHits) {
+        return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)
                 .map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())).collect(Collectors.toList());
-        return unknownDocs;
     }

    /**
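With this rename, the per-query section written by toXContent lists documents that carry no rating under unrated_docs instead of unknown_docs. A sketch of one details entry, assembled from the test fixtures further down (the values are the fixtures', purely illustrative):

    "details": {
      "coffee_query": {
        "quality_level": 0.1,
        "unrated_docs": [ { "_index": "index", "_id": "456" } ],
        "hits": [
          { "hit": { "_index": "index", "_type": "", "_id": "123", "_score": 1.0 }, "rating": 5 },
          { "hit": { "_index": "index", "_type": "", "_id": "456", "_score": 1.0 } }
        ]
      }
    }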
@@ -40,7 +40,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

-import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
+import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
 import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -128,7 +128,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
         EvalQueryQuality result = dcg.evaluate("id", hits, rated);
         assertEquals(12.779642067948913, result.getQualityLevel(), DELTA);
-        assertEquals(2, filterUnknownDocuments(result.getHitsAndRatings()).size());
+        assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size());

     /**
      * Check with normalization: to get the maximal possible dcg, sort documents by
@@ -185,7 +185,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
         EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
         assertEquals(12.392789260714371, result.getQualityLevel(), DELTA);
-        assertEquals(1, filterUnknownDocuments(result.getHitsAndRatings()).size());
+        assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size());

     /**
      * Check with normalization: to get the maximal possible dcg, sort documents by
@@ -224,13 +224,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
         DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
         EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
         assertEquals(0.0d, result.getQualityLevel(), DELTA);
-        assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size());
+        assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());

         // also check normalized
         dcg = new DiscountedCumulativeGain(true, null, 10);
         result = dcg.evaluate("id", hits, ratedDocs);
         assertEquals(0.0d, result.getQualityLevel(), DELTA);
-        assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size());
+        assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());
     }

     public void testParseFromXContent() throws IOException {
@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.rankeval.RatedDocument.DocumentKey;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.test.ESTestCase;

@@ -52,11 +51,6 @@ public class EvalQueryQualityTests extends ESTestCase {
     }

     public static EvalQueryQuality randomEvalQueryQuality() {
-        List<DocumentKey> unknownDocs = new ArrayList<>();
-        int numberOfUnknownDocs = randomInt(5);
-        for (int i = 0; i < numberOfUnknownDocs; i++) {
-            unknownDocs.add(new DocumentKey(randomAlphaOfLength(10), randomAlphaOfLength(10)));
-        }
         int numberOfSearchHits = randomInt(5);
         List<RatedSearchHit> ratedHits = new ArrayList<>();
         for (int i = 0; i < numberOfSearchHits; i++) {
@@ -40,7 +40,7 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.Set;

-import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
+import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.instanceOf;

@@ -120,7 +120,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
         for (Entry<String, EvalQueryQuality> entry : entrySet) {
             EvalQueryQuality quality = entry.getValue();
             if (entry.getKey() == "amsterdam_query") {
-                assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size());
+                assertEquals(2, filterUnratedDocuments(quality.getHitsAndRatings()).size());
                 List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
                 assertEquals(6, hitsAndRatings.size());
                 for (RatedSearchHit hit : hitsAndRatings) {
@@ -133,7 +133,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
                 }
             }
             if (entry.getKey() == "berlin_query") {
-                assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size());
+                assertEquals(5, filterUnratedDocuments(quality.getHitsAndRatings()).size());
                 List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
                 assertEquals(6, hitsAndRatings.size());
                 for (RatedSearchHit hit : hitsAndRatings) {
@@ -158,7 +158,7 @@ public class RankEvalResponseTests extends ESTestCase {
                 " \"details\": {" +
                 " \"coffee_query\": {" +
                 " \"quality_level\": 0.1," +
-                " \"unknown_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
+                " \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
                 " \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," +
                 " \"rating\":5}," +
                 " {\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"456\",\"_score\":1.0}," +
@@ -73,7 +73,7 @@ setup:

  - match: { quality_level: 1}
  - match: { details.amsterdam_query.quality_level: 1.0}
- - match: { details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]}
+ - match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
  - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}}

  - length: { details.amsterdam_query.hits: 3}
@@ -85,7 +85,7 @@ setup:
  - is_false: details.amsterdam_query.hits.2.rating

  - match: { details.berlin_query.quality_level: 1.0}
- - match: { details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]}
+ - match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
  - match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}
  - length: { details.berlin_query.hits: 2}
  - match: { details.berlin_query.hits.0.hit._id: "doc1" }
@@ -155,9 +155,9 @@ setup:
  - gt: {details.amsterdam_query.quality_level: 0.333}
  - lt: {details.amsterdam_query.quality_level: 0.334}
  - match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}}
- - match: {details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc2"},
+ - match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"},
                                                    {"_index": "foo", "_id": "doc3"} ]}
  - match: {details.berlin_query.quality_level: 0.5}
  - match: {details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}}
- - match: {details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc1"}]}
+ - match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]}
@@ -73,7 +73,7 @@
  - lt: {quality_level: 13.848264 }
  - gt: {details.dcg_query.quality_level: 13.848263}
  - lt: {details.dcg_query.quality_level: 13.848264}
- - match: {details.dcg_query.unknown_docs: [ ]}
+ - match: {details.dcg_query.unrated_docs: [ ]}

 # reverse the order in which the results are returned (less relevant docs first)

@@ -100,7 +100,7 @@
  - lt: {quality_level: 10.299675}
  - gt: {details.dcg_query_reverse.quality_level: 10.299674}
  - lt: {details.dcg_query_reverse.quality_level: 10.299675}
- - match: {details.dcg_query_reverse.unknown_docs: [ ]}
+ - match: {details.dcg_query_reverse.unrated_docs: [ ]}

 # if we mix both, we should get the average

@@ -138,7 +138,7 @@
  - lt: {quality_level: 12.073970}
  - gt: {details.dcg_query.quality_level: 13.848263}
  - lt: {details.dcg_query.quality_level: 13.848264}
- - match: {details.dcg_query.unknown_docs: [ ]}
+ - match: {details.dcg_query.unrated_docs: [ ]}
  - gt: {details.dcg_query_reverse.quality_level: 10.299674}
  - lt: {details.dcg_query_reverse.quality_level: 10.299675}
- - match: {details.dcg_query_reverse.unknown_docs: [ ]}
+ - match: {details.dcg_query_reverse.unrated_docs: [ ]}
@@ -36,7 +36,7 @@

  - match: { quality_level: 1}
  - match: { details.amsterdam_query.quality_level: 1.0}
- - match: { details.amsterdam_query.unknown_docs: [ ]}
+ - match: { details.amsterdam_query.unrated_docs: [ ]}
  - match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}

  - is_true: failures.invalid_query
@@ -85,7 +85,7 @@ setup:
          }

  - match: {quality_level: 0.9}
- - match: {details.amsterdam_query.unknown_docs.0._id: "6"}
+ - match: {details.amsterdam_query.unrated_docs.0._id: "6"}

 ---
 "Test illegal request parts":
@@ -57,7 +57,6 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler<Upda
     }

     @Override
-    @SuppressWarnings("unchecked")
     protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException {
         /*
          * Passing the search request through UpdateByQueryRequest first allows
@@ -90,7 +90,6 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase
     @Override
     protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
         final Netty4Transport t = (Netty4Transport) transport;
-        @SuppressWarnings("unchecked")
         final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;
         CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true);
     }
@@ -185,6 +185,16 @@ public final class GeoIpProcessor extends AbstractProcessor {
                     geoData.put("continent_name", continentName);
                 }
                 break;
+            case REGION_ISO_CODE:
+                // ISO 3166-2 code for country subdivisions.
+                // See iso.org/iso-3166-country-codes.html
+                String countryIso = country.getIsoCode();
+                String subdivisionIso = subdivision.getIsoCode();
+                if (countryIso != null && subdivisionIso != null) {
+                    String regionIsoCode = countryIso + "-" + subdivisionIso;
+                    geoData.put("region_iso_code", regionIsoCode);
+                }
+                break;
             case REGION_NAME:
                 String subdivisionName = subdivision.getName();
                 if (subdivisionName != null) {
@@ -300,8 +310,8 @@ public final class GeoIpProcessor extends AbstractProcessor {

     public static final class Factory implements Processor.Factory {
         static final Set<Property> DEFAULT_CITY_PROPERTIES = EnumSet.of(
-            Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME,
-            Property.CITY_NAME, Property.LOCATION
+            Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE,
+            Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
         );
         static final Set<Property> DEFAULT_COUNTRY_PROPERTIES = EnumSet.of(
             Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE
@@ -377,6 +387,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
         COUNTRY_ISO_CODE,
         COUNTRY_NAME,
         CONTINENT_NAME,
+        REGION_ISO_CODE,
         REGION_NAME,
         CITY_NAME,
         TIMEZONE,
@@ -386,7 +397,8 @@ public final class GeoIpProcessor extends AbstractProcessor {

         static final EnumSet<Property> ALL_CITY_PROPERTIES = EnumSet.of(
             Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_NAME,
-            Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION
+            Property.REGION_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE,
+            Property.LOCATION
         );
         static final EnumSet<Property> ALL_COUNTRY_PROPERTIES = EnumSet.of(
             Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE
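Because REGION_ISO_CODE is now part of DEFAULT_CITY_PROPERTIES, a plain geoip processor emits region_iso_code without extra configuration; when the properties are listed explicitly it has to be named, as the REST test further down does. A minimal sketch of such a processor configuration (the field name is taken from that test, everything else is illustrative):

    {
      "geoip": {
        "field": "field1",
        "properties": ["city_name", "country_iso_code", "ip", "location", "timezone",
                       "country_name", "region_iso_code", "region_name", "continent_name"]
      }
    }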
@@ -284,7 +284,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
         config1.put("properties", Collections.singletonList("invalid"));
         Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config1));
         assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " +
-                "COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));
+                "COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));

         Map<String, Object> config2 = new HashMap<>();
         config2.put("field", "_field");
@@ -117,11 +117,12 @@ public class GeoIpProcessorTests extends ESTestCase {
         assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address));
         @SuppressWarnings("unchecked")
         Map<String, Object> geoData = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("target_field");
-        assertThat(geoData.size(), equalTo(8));
+        assertThat(geoData.size(), equalTo(9));
         assertThat(geoData.get("ip"), equalTo(address));
         assertThat(geoData.get("country_iso_code"), equalTo("US"));
         assertThat(geoData.get("country_name"), equalTo("United States"));
         assertThat(geoData.get("continent_name"), equalTo("North America"));
+        assertThat(geoData.get("region_iso_code"), equalTo("US-FL"));
         assertThat(geoData.get("region_name"), equalTo("Florida"));
         assertThat(geoData.get("city_name"), equalTo("Hollywood"));
         assertThat(geoData.get("timezone"), equalTo("America/New_York"));
@@ -30,11 +30,12 @@
         type: test
         id: 1
   - match: { _source.field1: "128.101.101.101" }
-  - length: { _source.geoip: 5 }
+  - length: { _source.geoip: 6 }
   - match: { _source.geoip.city_name: "Minneapolis" }
   - match: { _source.geoip.country_iso_code: "US" }
   - match: { _source.geoip.location.lon: -93.2166 }
   - match: { _source.geoip.location.lat: 44.9759 }
+  - match: { _source.geoip.region_iso_code: "US-MN" }
   - match: { _source.geoip.region_name: "Minnesota" }
   - match: { _source.geoip.continent_name: "North America" }

@@ -54,7 +55,7 @@
             {
               "geoip" : {
                 "field" : "field1",
-                "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"]
+                "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_iso_code", "region_name", "continent_name"]
               }
             }
           ]
@@ -75,7 +76,7 @@
         type: test
         id: 1
   - match: { _source.field1: "128.101.101.101" }
-  - length: { _source.geoip: 8 }
+  - length: { _source.geoip: 9 }
   - match: { _source.geoip.city_name: "Minneapolis" }
   - match: { _source.geoip.country_iso_code: "US" }
   - match: { _source.geoip.ip: "128.101.101.101" }
@@ -83,6 +84,7 @@
   - match: { _source.geoip.location.lat: 44.9759 }
   - match: { _source.geoip.timezone: "America/Chicago" }
   - match: { _source.geoip.country_name: "United States" }
+  - match: { _source.geoip.region_iso_code: "US-MN" }
   - match: { _source.geoip.region_name: "Minnesota" }
   - match: { _source.geoip.continent_name: "North America" }

@@ -188,11 +190,12 @@
         type: test
         id: 2
   - match: { _source.field1: "128.101.101.101" }
-  - length: { _source.geoip: 5 }
+  - length: { _source.geoip: 6 }
   - match: { _source.geoip.city_name: "Minneapolis" }
   - match: { _source.geoip.country_iso_code: "US" }
   - match: { _source.geoip.location.lon: -93.2166 }
   - match: { _source.geoip.location.lat: 44.9759 }
+  - match: { _source.geoip.region_iso_code: "US-MN" }
   - match: { _source.geoip.region_name: "Minnesota" }
   - match: { _source.geoip.continent_name: "North America" }

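Putting the assertions above together, the geoip object produced for 128.101.101.101 with the default city properties now carries six keys, roughly as follows (a sketch assembled from the expected values in these tests):

    "geoip": {
      "continent_name": "North America",
      "country_iso_code": "US",
      "region_iso_code": "US-MN",
      "region_name": "Minnesota",
      "city_name": "Minneapolis",
      "location": { "lon": -93.2166, "lat": 44.9759 }
    }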
@@ -32,6 +32,7 @@ import io.netty.handler.codec.http.HttpResponseDecoder;
 import io.netty.handler.codec.http.HttpResponseStatus;
 import io.netty.handler.codec.http.HttpUtil;
 import io.netty.handler.codec.http.HttpVersion;

 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
@@ -89,7 +90,6 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
     private final ResponseDecoder responseDecoder = new ResponseDecoder();

     @Before
-    @SuppressWarnings("unchecked")
     public void setMocks() {
         transport = mock(NioHttpServerTransport.class);
         Settings settings = Settings.EMPTY;
@@ -95,7 +95,6 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase {

     @Override
     protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
-        @SuppressWarnings("unchecked")
         TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;
         CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true);
     }
@ -19,9 +19,6 @@
|
||||||
|
|
||||||
package org.elasticsearch.upgrades;
|
package org.elasticsearch.upgrades;
|
||||||
|
|
||||||
import org.apache.http.HttpEntity;
|
|
||||||
import org.apache.http.entity.ContentType;
|
|
||||||
import org.apache.http.entity.StringEntity;
|
|
||||||
import org.apache.http.util.EntityUtils;
|
import org.apache.http.util.EntityUtils;
|
||||||
import org.elasticsearch.Version;
|
import org.elasticsearch.Version;
|
||||||
import org.elasticsearch.client.Request;
|
import org.elasticsearch.client.Request;
|
||||||
|
@ -34,7 +31,6 @@ import org.elasticsearch.common.CheckedFunction;
|
||||||
import org.elasticsearch.common.Strings;
|
import org.elasticsearch.common.Strings;
|
||||||
import org.elasticsearch.common.settings.Settings;
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
|
||||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||||
import org.elasticsearch.test.NotEqualMessageBuilder;
|
import org.elasticsearch.test.NotEqualMessageBuilder;
|
||||||
|
@ -45,7 +41,6 @@ import org.junit.Before;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Base64;
|
import java.util.Base64;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@@ -142,8 +137,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/" + index, Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
+Request createIndex = new Request("PUT", "/" + index);
+createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createIndex);

 count = randomIntBetween(2000, 3000);
 byte[] randomByteArray = new byte[16];
@@ -164,16 +160,7 @@
 count = countOfIndexedRandomDocuments();
 }

-Map<String, String> params = new HashMap<>();
-params.put("timeout", "2m");
-params.put("wait_for_status", "green");
-params.put("wait_for_no_relocating_shards", "true");
-params.put("wait_for_events", "languid");
-Map<String, Object> healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params));
-logger.info("health api response: {}", healthRsp);
-assertEquals("green", healthRsp.get("status"));
-assertFalse((Boolean) healthRsp.get("timed_out"));
+ensureGreenLongWait(index);

 assertBasicSearchWorks(count);
 assertAllSearchWorks(count);
 assertBasicAggregationWorks();
@@ -205,8 +192,9 @@
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/" + index, Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
+Request createIndex = new Request("PUT", "/" + index);
+createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createIndex);

 int numDocs = randomIntBetween(2000, 3000);
 indexRandomDocuments(numDocs, true, false, i -> {
@@ -215,33 +203,26 @@
 .endObject();
 });
 logger.info("Refreshing [{}]", index);
-client().performRequest("POST", "/" + index + "/_refresh");
+client().performRequest(new Request("POST", "/" + index + "/_refresh"));
 } else {
 final int numReplicas = 1;
 final long startTime = System.currentTimeMillis();
 logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index);
-String requestBody = "{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}";
-Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, response.getStatusLine().getStatusCode());
+Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings");
+setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}");
+Response response = client().performRequest(setNumberOfReplicas);

-Map<String, String> params = new HashMap<>();
-params.put("timeout", "2m");
-params.put("wait_for_status", "green");
-params.put("wait_for_no_relocating_shards", "true");
-params.put("wait_for_events", "languid");
-Map<String, Object> healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params));
-assertEquals("green", healthRsp.get("status"));
-assertFalse((Boolean) healthRsp.get("timed_out"));
+ensureGreenLongWait(index);

 logger.debug("--> index [{}] is green, took [{}] ms", index, (System.currentTimeMillis() - startTime));
-Map<String, Object> recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery"));
+Map<String, Object> recoverRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_recovery")));
 logger.debug("--> recovery status:\n{}", recoverRsp);

 Set<Integer> counts = new HashSet<>();
 for (String node : dataNodes(index, client())) {
-Map<String, Object> responseBody = toMap(client().performRequest("GET", "/" + index + "/_search",
-Collections.singletonMap("preference", "_only_nodes:" + node)));
+Request search = new Request("GET", "/" + index + "/_search");
+search.addParameter("preference", "_only_nodes:" + node);
+Map<String, Object> responseBody = entityAsMap(client().performRequest(search));
 assertNoFailures(responseBody);
 int hits = (int) XContentMapValues.extractValue("hits.total", responseBody);
 counts.add(hits);
@@ -282,12 +263,13 @@
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/" + index, Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
+Request createIndex = new Request("PUT", "/" + index);
+createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createIndex);

 String aliasName = "%23" + index; // %23 == #
-client().performRequest("PUT", "/" + index + "/_alias/" + aliasName);
-Response response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName);
+client().performRequest(new Request("PUT", "/" + index + "/_alias/" + aliasName));
+Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName));
 assertEquals(200, response.getStatusLine().getStatusCode());

 count = randomIntBetween(32, 128);
@@ -301,19 +283,20 @@
 count = countOfIndexedRandomDocuments();
 }

-logger.error("clusterState=" + toMap(client().performRequest("GET", "/_cluster/state",
-Collections.singletonMap("metric", "metadata"))));
+Request request = new Request("GET", "/_cluster/state");
+request.addParameter("metric", "metadata");
+logger.error("clusterState=" + entityAsMap(client().performRequest(request)));
 // We can read from the alias just like we can read from the index.
 String aliasName = "%23" + index; // %23 == #
-Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + aliasName + "/_search"));
+Map<String, Object> searchRsp = entityAsMap(client().performRequest(new Request("GET", "/" + aliasName + "/_search")));
 int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
 assertEquals(count, totalHits);
 if (runningAgainstOldCluster == false) {
 // We can remove the alias.
-Response response = client().performRequest("DELETE", "/" + index + "/_alias/" + aliasName);
+Response response = client().performRequest(new Request("DELETE", "/" + index + "/_alias/" + aliasName));
 assertEquals(200, response.getStatusLine().getStatusCode());
 // and check that it is gone:
-response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName);
+response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName));
 assertEquals(404, response.getStatusLine().getStatusCode());
 }
 }
@@ -330,13 +313,14 @@
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
-client().performRequest("PUT", "/" + index);
+Request createTemplate = new Request("PUT", "/_template/template_1");
+createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createTemplate);
+client().performRequest(new Request("PUT", "/" + index));
 }

 // verifying if we can still read some properties from cluster state api:
-Map<String, Object> clusterState = toMap(client().performRequest("GET", "/_cluster/state"));
+Map<String, Object> clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state")));

 // Check some global properties:
 String clusterName = (String) clusterState.get("cluster_name");
@@ -381,8 +365,9 @@
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/" + index, Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
+Request createIndex = new Request("PUT", "/" + index);
+createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createIndex);

 numDocs = randomIntBetween(512, 1024);
 indexRandomDocuments(numDocs, true, true, i -> {
@@ -393,23 +378,20 @@

 ensureGreen(index); // wait for source index to be available on both nodes before starting shrink

-String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}";
-Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
-new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, rsp.getStatusLine().getStatusCode());
+Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
+updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
+client().performRequest(updateSettingsRequest);

-String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}";
-rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(),
-new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, rsp.getStatusLine().getStatusCode());
+Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
+shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
+client().performRequest(shrinkIndexRequest);

-rsp = client().performRequest("POST", "/_refresh");
-assertEquals(200, rsp.getStatusLine().getStatusCode());
+client().performRequest(new Request("POST", "/_refresh"));
 } else {
 numDocs = countOfIndexedRandomDocuments();
 }

-Map<?, ?> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
+Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
 assertNoFailures(response);
 int totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
 assertThat(totalShards, greaterThan(1));
@@ -418,7 +400,7 @@
 int totalHits = (int) XContentMapValues.extractValue("hits.total", response);
 assertEquals(numDocs, totalHits);

-response = toMap(client().performRequest("GET", "/" + shrunkenIndex+ "/_search"));
+response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex+ "/_search")));
 assertNoFailures(response);
 totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
 assertEquals(1, totalShards);
@@ -448,8 +430,9 @@
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/" + index, Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
+Request createIndex = new Request("PUT", "/" + index);
+createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createIndex);

 numDocs = randomIntBetween(512, 1024);
 indexRandomDocuments(numDocs, true, true, i -> {
@@ -460,23 +443,20 @@
 } else {
 ensureGreen(index); // wait for source index to be available on both nodes before starting shrink

-String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}";
-Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
-new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, rsp.getStatusLine().getStatusCode());
+Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
+updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
+client().performRequest(updateSettingsRequest);

-String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}";
-rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(),
-new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, rsp.getStatusLine().getStatusCode());
+Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
+shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
+client().performRequest(shrinkIndexRequest);

 numDocs = countOfIndexedRandomDocuments();
 }

-Response rsp = client().performRequest("POST", "/_refresh");
-assertEquals(200, rsp.getStatusLine().getStatusCode());
+client().performRequest(new Request("POST", "/_refresh"));

-Map<?, ?> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
+Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
 assertNoFailures(response);
 int totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
 assertThat(totalShards, greaterThan(1));
@@ -486,7 +466,7 @@
 assertEquals(numDocs, totalHits);

 if (runningAgainstOldCluster == false) {
-response = toMap(client().performRequest("GET", "/" + shrunkenIndex + "/_search"));
+response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex + "/_search")));
 assertNoFailures(response);
 totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
 assertEquals(1, totalShards);
@@ -499,43 +479,48 @@

 void assertBasicSearchWorks(int count) throws IOException {
 logger.info("--> testing basic search");
-Map<String, Object> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
-assertNoFailures(response);
-int numDocs = (int) XContentMapValues.extractValue("hits.total", response);
-logger.info("Found {} in old index", numDocs);
-assertEquals(count, numDocs);
+{
+Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
+assertNoFailures(response);
+int numDocs = (int) XContentMapValues.extractValue("hits.total", response);
+logger.info("Found {} in old index", numDocs);
+assertEquals(count, numDocs);
+}

 logger.info("--> testing basic search with sort");
-String searchRequestBody = "{ \"sort\": [{ \"int\" : \"asc\" }]}";
-response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
-new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
-assertNoFailures(response);
-numDocs = (int) XContentMapValues.extractValue("hits.total", response);
-assertEquals(count, numDocs);
+{
+Request searchRequest = new Request("GET", "/" + index + "/_search");
+searchRequest.setJsonEntity("{ \"sort\": [{ \"int\" : \"asc\" }]}");
+Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
+assertNoFailures(response);
+assertTotalHits(count, response);
+}

 logger.info("--> testing exists filter");
-searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}";
-response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
-new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
-assertNoFailures(response);
-numDocs = (int) XContentMapValues.extractValue("hits.total", response);
-assertEquals(count, numDocs);
+{
+Request searchRequest = new Request("GET", "/" + index + "/_search");
+searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"string\"} }}");
+Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
+assertNoFailures(response);
+assertTotalHits(count, response);
+}

-searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}";
-response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
-new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
-assertNoFailures(response);
-numDocs = (int) XContentMapValues.extractValue("hits.total", response);
-assertEquals(count, numDocs);
+logger.info("--> testing field with dots in the name");
+{
+Request searchRequest = new Request("GET", "/" + index + "/_search");
+searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}");
+Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
+assertNoFailures(response);
+assertTotalHits(count, response);
+}
 }

 void assertAllSearchWorks(int count) throws IOException {
 logger.info("--> testing _all search");
-Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search"));
-assertNoFailures(searchRsp);
-int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
-assertEquals(count, totalHits);
-Map<?, ?> bestHit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0);
+Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
+assertNoFailures(response);
+assertTotalHits(count, response);
+Map<?, ?> bestHit = (Map<?, ?>) ((List<?>) (XContentMapValues.extractValue("hits.hits", response))).get(0);

 // Make sure there are payloads and they are taken into account for the score
 // the 'string' field has a boost of 4 in the mappings so it should get a payload boost
@@ -543,82 +528,77 @@
 assertNotNull(stringValue);
 String type = (String) bestHit.get("_type");
 String id = (String) bestHit.get("_id");
-String requestBody = "{ \"query\": { \"match_all\" : {} }}";
-String explanation = toStr(client().performRequest("GET", "/" + index + "/" + type + "/" + id,
-Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
+Request explanationRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
+explanationRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
+String explanation = toStr(client().performRequest(explanationRequest));
 assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost"));

 // Make sure the query can run on the whole index
-searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search",
-Collections.singletonMap("explain", "true"), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
-assertNoFailures(searchRsp);
-totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
-assertEquals(count, totalHits);
+Request searchRequest = new Request("GET", "/" + index + "/_search");
+searchRequest.setEntity(explanationRequest.getEntity());
+searchRequest.addParameter("explain", "true");
+Map<?, ?> matchAllResponse = entityAsMap(client().performRequest(searchRequest));
+assertNoFailures(matchAllResponse);
+assertTotalHits(count, matchAllResponse);
 }

 void assertBasicAggregationWorks() throws IOException {
 // histogram on a long
-String requestBody = "{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}";
-Map<?, ?> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
-assertNoFailures(searchRsp);
-List<?> histoBuckets = (List<?>) XContentMapValues.extractValue("aggregations.histo.buckets", searchRsp);
-long totalCount = 0;
+Request longHistogramRequest = new Request("GET", "/" + index + "/_search");
+longHistogramRequest.setJsonEntity("{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}");
+Map<?, ?> longHistogram = entityAsMap(client().performRequest(longHistogramRequest));
+assertNoFailures(longHistogram);
+List<?> histoBuckets = (List<?>) XContentMapValues.extractValue("aggregations.histo.buckets", longHistogram);
+int histoCount = 0;
 for (Object entry : histoBuckets) {
 Map<?, ?> bucket = (Map<?, ?>) entry;
-totalCount += (Integer) bucket.get("doc_count");
+histoCount += (Integer) bucket.get("doc_count");
 }
-int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
-assertEquals(totalHits, totalCount);
+assertTotalHits(histoCount, longHistogram);

 // terms on a boolean
-requestBody = "{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}";
-searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
-List<?> termsBuckets = (List<?>) XContentMapValues.extractValue("aggregations.bool_terms.buckets", searchRsp);
-totalCount = 0;
+Request boolTermsRequest = new Request("GET", "/" + index + "/_search");
+boolTermsRequest.setJsonEntity("{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}");
+Map<?, ?> boolTerms = entityAsMap(client().performRequest(boolTermsRequest));
+List<?> termsBuckets = (List<?>) XContentMapValues.extractValue("aggregations.bool_terms.buckets", boolTerms);
+int termsCount = 0;
 for (Object entry : termsBuckets) {
 Map<?, ?> bucket = (Map<?, ?>) entry;
-totalCount += (Integer) bucket.get("doc_count");
+termsCount += (Integer) bucket.get("doc_count");
 }
-totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
-assertEquals(totalHits, totalCount);
+assertTotalHits(termsCount, boolTerms);
 }

 void assertRealtimeGetWorks() throws IOException {
-String requestBody = "{ \"index\": { \"refresh_interval\" : -1 }}";
-Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, response.getStatusLine().getStatusCode());
+Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
+disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}");
+client().performRequest(disableAutoRefresh);

-requestBody = "{ \"query\": { \"match_all\" : {} }}";
-Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
-Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0);
+Request searchRequest = new Request("GET", "/" + index + "/_search");
+searchRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
+Map<?, ?> searchResponse = entityAsMap(client().performRequest(searchRequest));
+Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0);
 String docId = (String) hit.get("_id");

-requestBody = "{ \"doc\" : { \"foo\": \"bar\"}}";
-response = client().performRequest("POST", "/" + index + "/doc/" + docId + "/_update", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, response.getStatusLine().getStatusCode());
+Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update");
+updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}");
+client().performRequest(updateRequest);

-Map<String, Object> getRsp = toMap(client().performRequest("GET", "/" + index + "/doc/" + docId));
+Map<String, Object> getRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/doc/" + docId)));
 Map<?, ?> source = (Map<?, ?>) getRsp.get("_source");
 assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo"));

-requestBody = "{ \"index\": { \"refresh_interval\" : \"1s\" }}";
-response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
-new StringEntity(requestBody, ContentType.APPLICATION_JSON));
-assertEquals(200, response.getStatusLine().getStatusCode());
+Request enableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
+enableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : \"1s\" }}");
+client().performRequest(enableAutoRefresh);
 }

 void assertStoredBinaryFields(int count) throws Exception {
-String requestBody = "{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}";
-Map<String, Object> rsp = toMap(client().performRequest("GET", "/" + index + "/_search",
-Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
+Request request = new Request("GET", "/" + index + "/_search");
+request.setJsonEntity("{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}");
+Map<String, Object> rsp = entityAsMap(client().performRequest(request));

-int totalCount = (Integer) XContentMapValues.extractValue("hits.total", rsp);
-assertEquals(count, totalCount);
+assertTotalHits(count, rsp);
 List<?> hits = (List<?>) XContentMapValues.extractValue("hits.hits", rsp);
 assertEquals(100, hits.size());
 for (Object hit : hits) {
@@ -631,14 +611,6 @@
 }
 }

-static Map<String, Object> toMap(Response response) throws IOException {
-return toMap(EntityUtils.toString(response.getEntity()));
-}
-
-static Map<String, Object> toMap(String response) throws IOException {
-return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
-}

 static String toStr(Response response) throws IOException {
 return EntityUtils.toString(response.getEntity());
 }
@@ -648,6 +620,11 @@
 assertEquals(0, failed);
 }

+static void assertTotalHits(int expectedTotalHits, Map<?, ?> response) {
+int actualTotalHits = (Integer) XContentMapValues.extractValue("hits.total", response);
+assertEquals(expectedTotalHits, actualTotalHits);
+}
+
 /**
 * Tests that a single document survives. Super basic smoke test.
 */
@@ -656,11 +633,12 @@
 String doc = "{\"test\": \"test\"}";

 if (runningAgainstOldCluster) {
-client().performRequest("PUT", docLocation, singletonMap("refresh", "true"),
-new StringEntity(doc, ContentType.APPLICATION_JSON));
+Request createDoc = new Request("PUT", docLocation);
+createDoc.setJsonEntity(doc);
+client().performRequest(createDoc);
 }

-assertThat(toStr(client().performRequest("GET", docLocation)), containsString(doc));
+assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc));
 }

 /**
@@ -733,16 +711,18 @@
 }

 // Count the documents in the index to make sure we have as many as we put there
-String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0")));
+Request countRequest = new Request("GET", "/" + index + "/_search");
+countRequest.addParameter("size", "0");
+String countResponse = toStr(client().performRequest(countRequest));
 assertThat(countResponse, containsString("\"total\":" + count));

 if (false == runningAgainstOldCluster) {
 boolean restoredFromTranslog = false;
 boolean foundPrimary = false;
-Map<String, String> params = new HashMap<>();
-params.put("h", "index,shard,type,stage,translog_ops_recovered");
-params.put("s", "index,shard,type");
-String recoveryResponse = toStr(client().performRequest("GET", "/_cat/recovery/" + index, params));
+Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index);
+recoveryRequest.addParameter("h", "index,shard,type,stage,translog_ops_recovered");
+recoveryRequest.addParameter("s", "index,shard,type");
+String recoveryResponse = toStr(client().performRequest(recoveryRequest));
 for (String line : recoveryResponse.split("\n")) {
 // Find the primaries
 foundPrimary = true;
@@ -768,11 +748,10 @@
 if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) {
 int numCurrentVersion = 0;
 int numBwcVersion = 0;
-params.clear();
-params.put("h", "prirep,shard,index,version");
-params.put("s", "prirep,shard,index");
-String segmentsResponse = toStr(
-client().performRequest("GET", "/_cat/segments/" + index, params));
+Request segmentsRequest = new Request("GET", "/_cat/segments/" + index);
+segmentsRequest.addParameter("h", "prirep,shard,index,version");
+segmentsRequest.addParameter("s", "prirep,shard,index");
+String segmentsResponse = toStr(client().performRequest(segmentsRequest));
 for (String line : segmentsResponse.split("\n")) {
 if (false == line.startsWith("p")) {
 continue;
@@ -817,14 +796,16 @@
 refresh();

 // Count the documents in the index to make sure we have as many as we put there
-String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0")));
+Request countRequest = new Request("GET", "/" + index + "/_search");
+countRequest.addParameter("size", "0");
+String countResponse = toStr(client().performRequest(countRequest));
 assertThat(countResponse, containsString("\"total\":" + count));

 // Stick a routing attribute into to cluster settings so we can see it after the restore
-HttpEntity routingSetting = new StringEntity(
-"{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}",
-ContentType.APPLICATION_JSON);
-client().performRequest("PUT", "/_cluster/settings", emptyMap(), routingSetting);
+Request addRoutingSettings = new Request("PUT", "/_cluster/settings");
+addRoutingSettings.setJsonEntity(
+"{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}");
+client().performRequest(addRoutingSettings);

 // Stick a template into the cluster so we can see it after the restore
 XContentBuilder templateBuilder = JsonXContent.contentBuilder().startObject();
@@ -857,8 +838,9 @@
 templateBuilder.endObject();
 }
 templateBuilder.endObject().endObject();
-client().performRequest("PUT", "/_template/test_template", emptyMap(),
-new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON));
+Request createTemplateRequest = new Request("PUT", "/_template/test_template");
+createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder));
+client().performRequest(createTemplateRequest);

 if (runningAgainstOldCluster) {
 // Create the repo
@@ -871,13 +853,15 @@
 repoConfig.endObject();
 }
 repoConfig.endObject();
-client().performRequest("PUT", "/_snapshot/repo", emptyMap(),
-new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON));
+Request createRepoRequest = new Request("PUT", "/_snapshot/repo");
+createRepoRequest.setJsonEntity(Strings.toString(repoConfig));
+client().performRequest(createRepoRequest);
 }

-client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"),
-singletonMap("wait_for_completion", "true"),
-new StringEntity("{\"indices\": \"" + index + "\"}", ContentType.APPLICATION_JSON));
+Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"));
+createSnapshot.addParameter("wait_for_completion", "true");
+createSnapshot.setJsonEntity("{\"indices\": \"" + index + "\"}");
+client().performRequest(createSnapshot);

 checkSnapshot("old_snap", count, oldClusterVersion);
 if (false == runningAgainstOldCluster) {
@@ -896,10 +880,13 @@
 mappingsAndSettings.endObject();
 }
 mappingsAndSettings.endObject();
-client().performRequest("PUT", "/" + index, Collections.emptyMap(),
-new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
+Request createIndex = new Request("PUT", "/" + index);
+createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+client().performRequest(createIndex);
 } else {
-Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
+Request statsRequest = new Request("GET", index + "/_stats");
+statsRequest.addParameter("level", "shards");
+Response response = client().performRequest(statsRequest);
 List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
 String globalHistoryUUID = null;
 for (Object shard : shardStats) {
@@ -920,18 +907,20 @@

 private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException {
 // Check the snapshot metadata, especially the version
-String response = toStr(client().performRequest("GET", "/_snapshot/repo/" + snapshotName, listSnapshotVerboseParams()));
-Map<String, Object> map = toMap(response);
-assertEquals(response, singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", map));
-assertEquals(response, singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", map));
-assertEquals(response, singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", map));
+Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName);
+if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) {
+listSnapshotRequest.addParameter("verbose", "true");
+}
+Map<String, Object> listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest));
+assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse));
+assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse));
+assertEquals(singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", listSnapshotResponse));

 // Remove the routing setting and template so we can test restoring them.
-HttpEntity clearRoutingSetting = new StringEntity(
-"{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}",
-ContentType.APPLICATION_JSON);
-client().performRequest("PUT", "/_cluster/settings", emptyMap(), clearRoutingSetting);
-client().performRequest("DELETE", "/_template/test_template", emptyMap(), clearRoutingSetting);
+Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings");
+clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}");
+client().performRequest(clearRoutingFromSettings);
+client().performRequest(new Request("DELETE", "/_template/test_template"));

 // Restore
 XContentBuilder restoreCommand = JsonXContent.contentBuilder().startObject();
@@ -940,11 +929,15 @@
 restoreCommand.field("rename_pattern", index);
 restoreCommand.field("rename_replacement", "restored_" + index);
 restoreCommand.endObject();
-client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"),
-new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON));
+Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore");
+restoreRequest.addParameter("wait_for_completion", "true");
+restoreRequest.setJsonEntity(Strings.toString(restoreCommand));
+client().performRequest(restoreRequest);

 // Make sure search finds all documents
-String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
+Request countRequest = new Request("GET", "/restored_" + index + "/_search");
+countRequest.addParameter("size", "0");
+String countResponse = toStr(client().performRequest(countRequest));
 assertThat(countResponse, containsString("\"total\":" + count));

 // Add some extra documents to the index to be sure we can still write to it after restoring it
@@ -954,61 +947,56 @@
 bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n");
 bulk.append("{\"test\":\"test\"}\n");
 }
-client().performRequest("POST", "/restored_" + index + "/doc/_bulk", singletonMap("refresh", "true"),
-new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
+Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk");
+writeToRestoredRequest.addParameter("refresh", "true");
+writeToRestoredRequest.setJsonEntity(bulk.toString());
+client().performRequest(writeToRestoredRequest);

 // And count to make sure the add worked
 // Make sure search finds all documents
-countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
-assertThat(countResponse, containsString("\"total\":" + (count + extras)));
+Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search");
+countAfterWriteRequest.addParameter("size", "0");
+String countAfterWriteResponse = toStr(client().performRequest(countAfterWriteRequest));
+assertThat(countAfterWriteResponse, containsString("\"total\":" + (count + extras)));

 // Clean up the index for the next iteration
-client().performRequest("DELETE", "/restored_*");
+client().performRequest(new Request("DELETE", "/restored_*"));

 // Check settings added by the restore process
-map = toMap(client().performRequest("GET", "/_cluster/settings", singletonMap("flat_settings", "true")));
-Map<String, Object> expected = new HashMap<>();
-expected.put("transient", emptyMap());
-expected.put("persistent", singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString()));
-if (expected.equals(map) == false) {
+Request clusterSettingsRequest = new Request("GET", "/_cluster/settings");
+clusterSettingsRequest.addParameter("flat_settings", "true");
+Map<String, Object> clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest));
+Map<String, Object> expectedClusterSettings = new HashMap<>();
+expectedClusterSettings.put("transient", emptyMap());
+expectedClusterSettings.put("persistent",
+singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString()));
+if (expectedClusterSettings.equals(clusterSettingsResponse) == false) {
 NotEqualMessageBuilder builder = new NotEqualMessageBuilder();
-builder.compareMaps(map, expected);
+builder.compareMaps(clusterSettingsResponse, expectedClusterSettings);
 fail("settings don't match:\n" + builder.toString());
 }

 // Check that the template was restored successfully
-map = toMap(client().performRequest("GET", "/_template/test_template"));
-expected = new HashMap<>();
+Map<String, Object> getTemplateResponse = entityAsMap(client().performRequest(new Request("GET", "/_template/test_template")));
+Map<String, Object> expectedTemplate = new HashMap<>();
 if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_0_0_beta1)) {
-expected.put("template", "evil_*");
+expectedTemplate.put("template", "evil_*");
 } else {
-expected.put("index_patterns", singletonList("evil_*"));
+expectedTemplate.put("index_patterns", singletonList("evil_*"));
 }
-expected.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
-expected.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
-expected.put("order", 0);
+expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
+expectedTemplate.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
+expectedTemplate.put("order", 0);
 Map<String, Object> aliases = new HashMap<>();
 aliases.put("alias1", emptyMap());
 aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString()))));
-expected.put("aliases", aliases);
-expected = singletonMap("test_template", expected);
-if (false == expected.equals(map)) {
+expectedTemplate.put("aliases", aliases);
+expectedTemplate = singletonMap("test_template", expectedTemplate);
+if (false == expectedTemplate.equals(getTemplateResponse)) {
 NotEqualMessageBuilder builder = new NotEqualMessageBuilder();
-builder.compareMaps(map, expected);
+builder.compareMaps(getTemplateResponse, expectedTemplate);
 fail("template doesn't match:\n" + builder.toString());
 }

-}
-
-/**
-* Parameters required to get the version of Elasticsearch that took the snapshot.
-* On versions after 5.5 we need a {@code verbose} parameter.
-*/
-private Map<String, String> listSnapshotVerboseParams() {
-if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0)) {
-return emptyMap();
-}
-return singletonMap("verbose", "true");
 }

 // TODO tests for upgrades after shrink. We've had trouble with shrink in the past.
@@ -1018,14 +1006,15 @@
 logger.info("Indexing {} random documents", count);
 for (int i = 0; i < count; i++) {
 logger.debug("Indexing document [{}]", i);
-client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
-new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON));
+Request createDocument = new Request("POST", "/" + index + "/doc/" + i);
+createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
+client().performRequest(createDocument);
 if (rarely()) {
 refresh();
 }
 if (flushAllowed && rarely()) {
 logger.debug("Flushing [{}]", index);
-client().performRequest("POST", "/" + index + "/_flush");
+client().performRequest(new Request("POST", "/" + index + "/_flush"));
 }
 }
 if (saveInfo) {
@@ -1042,13 +1031,16 @@
 infoDoc.field("value", value);
 infoDoc.endObject();
 // Only create the first version so we know how many documents are created when the index is first created
-Map<String, String> params = singletonMap("op_type", "create");
-client().performRequest("PUT", "/info/doc/" + index + "_" + type, params,
-new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON));
+Request request = new Request("PUT", "/info/doc/" + index + "_" + type);
+request.addParameter("op_type", "create");
+request.setJsonEntity(Strings.toString(infoDoc));
+client().performRequest(request);
 }

 private String loadInfoDocument(String type) throws IOException {
-String doc = toStr(client().performRequest("GET", "/info/doc/" + index + "_" + type, singletonMap("filter_path", "_source")));
+Request request = new Request("GET", "/info/doc/" + index + "_" + type);
+request.addParameter("filter_path", "_source");
+String doc = toStr(client().performRequest(request));
 Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc);
 assertTrue(doc, m.find());
 return m.group(1);
@@ -1060,11 +1052,13 @@

 private void refresh() throws IOException {
 logger.debug("Refreshing [{}]", index);
-client().performRequest("POST", "/" + index + "/_refresh");
+client().performRequest(new Request("POST", "/" + index + "/_refresh"));
 }

 private List<String> dataNodes(String index, RestClient client) throws IOException {
-Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
+Request request = new Request("GET", index + "/_stats");
+request.addParameter("level", "shards");
+Response response = client.performRequest(request);
 List<String> nodes = new ArrayList<>();
 List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
 for (Object shard : shardStats) {
@ -1073,4 +1067,21 @@ public class FullClusterRestartIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
return nodes;
|
return nodes;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for an index to have green health, waiting longer than
|
||||||
|
* {@link ESRestTestCase#ensureGreen}.
|
||||||
|
*/
|
||||||
|
protected void ensureGreenLongWait(String index) throws IOException {
|
||||||
|
Request request = new Request("GET", "/_cluster/health/" + index);
|
||||||
|
request.addParameter("timeout", "2m");
|
||||||
|
request.addParameter("wait_for_status", "green");
|
||||||
|
request.addParameter("wait_for_no_relocating_shards", "true");
|
||||||
|
request.addParameter("wait_for_events", "languid");
|
||||||
|
request.addParameter("level", "shards");
|
||||||
|
Map<String, Object> healthRsp = entityAsMap(client().performRequest(request));
|
||||||
|
logger.info("health api response: {}", healthRsp);
|
||||||
|
assertEquals("green", healthRsp.get("status"));
|
||||||
|
assertFalse((Boolean) healthRsp.get("timed_out"));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -18,8 +18,6 @@
  */
 package org.elasticsearch.upgrades;

-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.client.Request;
@@ -32,14 +30,12 @@ import org.elasticsearch.test.rest.yaml.ObjectPath;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Future;
 import java.util.function.Predicate;

 import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
-import static java.util.Collections.emptyMap;
 import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
 import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING;
 import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
@@ -51,6 +47,8 @@ import static org.hamcrest.Matchers.notNullValue;
  * In depth testing of the recovery mechanism during a rolling restart.
  */
 public class RecoveryIT extends AbstractRollingTestCase {

+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31291")
     public void testHistoryUUIDIsGenerated() throws Exception {
         final String index = "index_history_uuid";
         if (CLUSTER_TYPE == ClusterType.OLD) {
@@ -65,8 +63,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
             createIndex(index, settings.build());
         } else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
             ensureGreen(index);
-            Response response = client().performRequest("GET", index + "/_stats", Collections.singletonMap("level", "shards"));
-            assertOK(response);
+            Request shardStatsRequest = new Request("GET", index + "/_stats");
+            shardStatsRequest.addParameter("level", "shards");
+            Response response = client().performRequest(shardStatsRequest);
             ObjectPath objectPath = ObjectPath.createFromResponse(response);
             List<Object> shardStats = objectPath.evaluate("indices." + index + ".shards.0");
             assertThat(shardStats, hasSize(2));
@@ -87,8 +86,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
     private int indexDocs(String index, final int idStart, final int numDocs) throws IOException {
         for (int i = 0; i < numDocs; i++) {
             final int id = idStart + i;
-            assertOK(client().performRequest("PUT", index + "/test/" + id, emptyMap(),
-                new StringEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}", ContentType.APPLICATION_JSON)));
+            Request indexDoc = new Request("PUT", index + "/test/" + id);
+            indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}");
+            client().performRequest(indexDoc);
         }
         return numDocs;
     }
@@ -113,7 +113,7 @@ public class RecoveryIT extends AbstractRollingTestCase {

     public void testRecoveryWithConcurrentIndexing() throws Exception {
         final String index = "recovery_with_concurrent_indexing";
-        Response response = client().performRequest("GET", "_nodes");
+        Response response = client().performRequest(new Request("GET", "_nodes"));
         ObjectPath objectPath = ObjectPath.createFromResponse(response);
         final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
         List<String> nodes = new ArrayList<>(nodeMap.keySet());
@@ -139,7 +139,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
                 updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
                 asyncIndexDocs(index, 10, 50).get();
                 ensureGreen(index);
-                assertOK(client().performRequest("POST", index + "/_refresh"));
+                client().performRequest(new Request("POST", index + "/_refresh"));
                 assertCount(index, "_only_nodes:" + nodes.get(0), 60);
                 assertCount(index, "_only_nodes:" + nodes.get(1), 60);
                 assertCount(index, "_only_nodes:" + nodes.get(2), 60);
@@ -150,7 +150,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
                 updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
                 asyncIndexDocs(index, 60, 50).get();
                 ensureGreen(index);
-                assertOK(client().performRequest("POST", index + "/_refresh"));
+                client().performRequest(new Request("POST", index + "/_refresh"));
                 assertCount(index, "_only_nodes:" + nodes.get(0), 110);
                 assertCount(index, "_only_nodes:" + nodes.get(1), 110);
                 assertCount(index, "_only_nodes:" + nodes.get(2), 110);
@@ -161,15 +161,16 @@ public class RecoveryIT extends AbstractRollingTestCase {
     }

     private void assertCount(final String index, final String preference, final int expectedCount) throws IOException {
-        final Response response = client().performRequest("GET", index + "/_count", Collections.singletonMap("preference", preference));
-        assertOK(response);
+        final Request request = new Request("GET", index + "/_count");
+        request.addParameter("preference", preference);
+        final Response response = client().performRequest(request);
         final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString());
         assertThat(actualCount, equalTo(expectedCount));
     }


     private String getNodeId(Predicate<Version> versionPredicate) throws IOException {
-        Response response = client().performRequest("GET", "_nodes");
+        Response response = client().performRequest(new Request("GET", "_nodes"));
         ObjectPath objectPath = ObjectPath.createFromResponse(response);
         Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
         for (String id : nodesAsMap.keySet()) {
@@ -216,7 +217,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
                 updateIndexSettings(index, Settings.builder().put("index.routing.allocation.include._id", newNode));
                 asyncIndexDocs(index, 10, 50).get();
                 ensureGreen(index);
-                assertOK(client().performRequest("POST", index + "/_refresh"));
+                client().performRequest(new Request("POST", index + "/_refresh"));
                 assertCount(index, "_only_nodes:" + newNode, 60);
                 break;
             case UPGRADED:
@@ -226,8 +227,8 @@ public class RecoveryIT extends AbstractRollingTestCase {
                 );
                 asyncIndexDocs(index, 60, 50).get();
                 ensureGreen(index);
-                assertOK(client().performRequest("POST", index + "/_refresh"));
-                Response response = client().performRequest("GET", "_nodes");
+                client().performRequest(new Request("POST", index + "/_refresh"));
+                Response response = client().performRequest(new Request("GET", "_nodes"));
                 ObjectPath objectPath = ObjectPath.createFromResponse(response);
                 final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
                 List<String> nodes = new ArrayList<>(nodeMap.keySet());

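Both test classes above follow the same migration: calls to the older multi-argument performRequest(method, endpoint, params, entity) overloads of the low-level REST client are replaced with a Request object that carries the endpoint, query parameters and JSON body. A minimal sketch of the new calling pattern, assuming a hypothetical index name, document id and body (none of these names come from the commit itself):

import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RequestApiSketch {
    // Build the endpoint, attach a query parameter and a JSON body, then execute the request.
    static Response indexExampleDoc(RestClient client) throws IOException {
        Request createDocument = new Request("PUT", "/example_index/doc/1"); // hypothetical endpoint
        createDocument.addParameter("op_type", "create");                    // query-string parameter
        createDocument.setJsonEntity("{\"field\": \"value\"}");              // JSON request body
        return client.performRequest(createDocument);                        // single-argument overload
    }
}
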
@@ -57,6 +57,7 @@ import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.isEmptyString;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assume.assumeThat;
 import static org.junit.Assume.assumeTrue;
@@ -302,5 +303,26 @@ public abstract class ArchiveTestCase extends PackagingTestCase {
         }
     }

+    public void test90SecurityCliPackaging() {
+        assumeThat(installation, is(notNullValue()));
+
+        final Installation.Executables bin = installation.executables();
+        final Shell sh = new Shell();
+
+        if (distribution().equals(Distribution.DEFAULT_TAR) || distribution().equals(Distribution.DEFAULT_ZIP)) {
+            assertTrue(Files.exists(installation.lib.resolve("tools").resolve("security-cli")));
+            Platforms.onLinux(() -> {
+                final Result result = sh.run(bin.elasticsearchCertutil + " help");
+                assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));
+            });
+
+            Platforms.onWindows(() -> {
+                final Result result = sh.run(bin.elasticsearchCertutil + " help");
+                assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));
+            });
+        } else if (distribution().equals(Distribution.OSS_TAR) || distribution().equals(Distribution.OSS_ZIP)) {
+            assertFalse(Files.exists(installation.lib.resolve("tools").resolve("security-cli")));
+        }
+    }

 }

@@ -101,6 +101,7 @@ public class Installation {
     public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin");
     public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore");
     public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog");
+    public final Path elasticsearchCertutil = platformExecutable("elasticsearch-certutil");

     private Path platformExecutable(String name) {
         final String platformExecutableName = Platforms.WINDOWS

@@ -106,7 +106,7 @@ dependencies {
     compile 'com.carrotsearch:hppc:0.7.1'

     // time handling, remove with java 8 time
-    compile 'joda-time:joda-time:2.9.9'
+    compile 'joda-time:joda-time:2.10'

     // percentiles aggregation
     compile 'com.tdunning:t-digest:3.2'

@@ -0,0 +1 @@
+f66c8125d1057ffce6c4e29e624cac863e110e2b

@@ -1 +0,0 @@
-f7b520c458572890807d143670c9b24f4de90897

@@ -32,6 +32,11 @@ public interface AliasesRequest extends IndicesRequest.Replaceable {
      */
     String[] aliases();

+    /**
+     * Returns the aliases as they were originally requested, before any potential name resolution
+     */
+    String[] getOriginalAliases();
+
     /**
      * Replaces current aliases with the provided aliases.
      *

@@ -48,7 +48,6 @@ public class ClusterGetSettingsResponse extends ActionResponse implements ToXCon
     static final String TRANSIENT_FIELD = "transient";
     static final String DEFAULTS_FIELD = "defaults";

-    @SuppressWarnings("unchecked")
     private static final ConstructingObjectParser<ClusterGetSettingsResponse, Void> PARSER =
         new ConstructingObjectParser<>(
             "cluster_get_settings_response",

@@ -214,6 +214,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
         private final AliasActions.Type type;
         private String[] indices;
         private String[] aliases = Strings.EMPTY_ARRAY;
+        private String[] originalAliases = Strings.EMPTY_ARRAY;
         private String filter;
         private String routing;
         private String indexRouting;
@@ -238,6 +239,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
             if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
                 writeIndex = in.readOptionalBoolean();
             }
+            if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+                originalAliases = in.readStringArray();
+            }
         }

         @Override
@@ -252,6 +256,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
             if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
                 out.writeOptionalBoolean(writeIndex);
             }
+            if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+                out.writeStringArray(originalAliases);
+            }
         }

         /**
@@ -315,6 +322,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
                 }
             }
             this.aliases = aliases;
+            this.originalAliases = aliases;
             return this;
         }

@@ -329,6 +337,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
                 throw new IllegalArgumentException("[alias] can't be empty string");
             }
             this.aliases = new String[] {alias};
+            this.originalAliases = aliases;
             return this;
         }

@@ -432,6 +441,11 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
             this.aliases = aliases;
         }

+        @Override
+        public String[] getOriginalAliases() {
+            return originalAliases;
+        }
+
         @Override
         public boolean expandAliasesWildcards() {
             //remove operations support wildcards among aliases, add operations don't
@@ -579,7 +593,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
         }, AliasActions.PARSER, new ParseField("actions"));
     }

-    public static IndicesAliasesRequest fromXContent(XContentParser parser) throws IOException {
+    public static IndicesAliasesRequest fromXContent(XContentParser parser) {
         return PARSER.apply(parser, null);
     }
 }

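The IndicesAliasesRequest change above adds an originalAliases field and reads and writes it only when the stream version is Version.V_7_0_0_alpha1 or later. A minimal sketch of that wire-compatibility pattern, with a hypothetical class and field name (only the read/write string-array calls and the version check come from the hunks above):

import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

class VersionGatedFieldSketch {
    // Hypothetical new field; the default keeps behaviour unchanged when talking to older nodes.
    private String[] originalNames = Strings.EMPTY_ARRAY;

    // Read the new field only from streams written by nodes recent enough to include it.
    void readFrom(StreamInput in) throws IOException {
        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
            originalNames = in.readStringArray();
        }
    }

    // Write the new field only to nodes that know how to read it.
    void writeTo(StreamOutput out) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
            out.writeStringArray(originalNames);
        }
    }
}
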
@@ -95,7 +95,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
         Set<String> aliases = new HashSet<>();
         for (AliasActions action : actions) {
             String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices());
-            Collections.addAll(aliases, action.aliases());
+            Collections.addAll(aliases, action.getOriginalAliases());
             for (String index : concreteIndices) {
                 switch (action.actionType()) {
                     case ADD:
@@ -142,7 +142,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
                     if (action.expandAliasesWildcards()) {
                         //for DELETE we expand the aliases
                         String[] indexAsArray = {concreteIndex};
-                        ImmutableOpenMap<String, List<AliasMetaData>> aliasMetaData = metaData.findAliases(action.aliases(), indexAsArray);
+                        ImmutableOpenMap<String, List<AliasMetaData>> aliasMetaData = metaData.findAliases(action, indexAsArray);
                         List<String> finalAliases = new ArrayList<>();
                         for (ObjectCursor<List<AliasMetaData>> curAliases : aliasMetaData.values()) {
                             for (AliasMetaData aliasMeta: curAliases.value) {

@@ -63,7 +63,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
     @Override
     protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) {
         String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
-        ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request.aliases(), concreteIndices);
+        ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request, concreteIndices);
         listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases)));
     }

@@ -32,15 +32,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.common.settings.IndexScopedSettings;

 import java.io.IOException;
 import java.util.List;
@@ -110,7 +109,7 @@ public class TransportGetIndexAction extends TransportClusterInfoAction<GetIndex
                     break;
                 case ALIASES:
                     if (!doneAliases) {
-                        aliasesResult = state.metaData().findAliases(Strings.EMPTY_ARRAY, concreteIndices);
+                        aliasesResult = state.metaData().findAllAliases(concreteIndices);
                         doneAliases = true;
                     }
                     break;

@@ -45,7 +45,6 @@ public class QueryExplanation implements Streamable, ToXContentFragment {

     public static final int RANDOM_SHARD = -1;

-    @SuppressWarnings("unchecked")
     static ConstructingObjectParser<QueryExplanation, Void> PARSER = new ConstructingObjectParser<>(
         "query_explanation",
         true,

@@ -129,7 +129,6 @@ public class GetResponse extends ActionResponse implements Iterable<DocumentFiel
     /**
     * The source of the document (As a map).
     */
-    @SuppressWarnings({"unchecked"})
     public Map<String, Object> getSourceAsMap() throws ElasticsearchParseException {
         return getResult.sourceAsMap();
     }

@@ -20,7 +20,6 @@
 package org.elasticsearch.action.get;

 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -90,9 +89,9 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction<Mul
                 GetResult getResult = indexShard.getService().get(item.type(), item.id(), item.storedFields(), request.realtime(), item.version(),
                     item.versionType(), item.fetchSourceContext());
                 response.add(request.locations.get(i), new GetResponse(getResult));
-            } catch (Exception e) {
+            } catch (RuntimeException e) {
                 if (TransportActions.isShardNotAvailableException(e)) {
-                    throw (ElasticsearchException) e;
+                    throw e;
                 } else {
                     logger.debug(() -> new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId,
                         item.type(), item.id()), e);

@@ -32,8 +32,8 @@ import org.elasticsearch.ingest.IngestDocument;

 import java.io.IOException;

-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

 public class SimulateProcessorResult implements Writeable, ToXContentObject {

@@ -42,7 +42,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject {
     private final WriteableIngestDocument ingestDocument;
     private final Exception failure;

-    @SuppressWarnings("unchecked")
     private static final ConstructingObjectParser<ElasticsearchException, Void> IGNORED_ERROR_PARSER =
         new ConstructingObjectParser<>(
             "ignored_error_parser",
@@ -57,7 +56,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject {
         );
     }

-    @SuppressWarnings("unchecked")
     public static final ConstructingObjectParser<SimulateProcessorResult, Void> PARSER =
         new ConstructingObjectParser<>(
             "simulate_processor_result",

@@ -94,7 +94,6 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment {
         );
     }

-    @SuppressWarnings("unchecked")
     public static final ConstructingObjectParser<WriteableIngestDocument, Void> PARSER =
         new ConstructingObjectParser<>(
             "writeable_ingest_document",

@@ -20,6 +20,7 @@
 package org.elasticsearch.action.support;

 import com.carrotsearch.hppc.cursors.IntObjectCursor;
+
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
@@ -205,7 +206,7 @@ public final class ActiveShardCount implements Writeable {
         if (o == null || getClass() != o.getClass()) {
             return false;
         }
-        @SuppressWarnings("unchecked") ActiveShardCount that = (ActiveShardCount) o;
+        ActiveShardCount that = (ActiveShardCount) o;
         return value == that.value;
     }

@@ -72,7 +72,6 @@ public abstract class ReplicationRequestBuilder<Request extends ReplicationReque
     * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
     * to get the ActiveShardCount.
     */
-    @SuppressWarnings("unchecked")
     public RequestBuilder setWaitForActiveShards(final int waitForActiveShards) {
         return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards));
     }

@@ -20,7 +20,6 @@
 package org.elasticsearch.action.termvectors;

 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
@@ -84,13 +83,13 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
             try {
                 TermVectorsResponse termVectorsResponse = TermVectorsService.getTermVectors(indexShard, termVectorsRequest);
                 response.add(request.locations.get(i), termVectorsResponse);
-            } catch (Exception t) {
-                if (TransportActions.isShardNotAvailableException(t)) {
-                    throw (ElasticsearchException) t;
+            } catch (RuntimeException e) {
+                if (TransportActions.isShardNotAvailableException(e)) {
+                    throw e;
                 } else {
-                    logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), t);
+                    logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), e);
                     response.add(request.locations.get(i),
-                        new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t));
+                        new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e));
                 }
             }
         }

@@ -20,6 +20,7 @@
 package org.elasticsearch.cluster;

 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterState.Custom;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
@@ -165,7 +166,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements
         if (o == null || getClass() != o.getClass()) {
             return false;
         }
-        @SuppressWarnings("unchecked") Entry entry = (Entry) o;
+        Entry entry = (Entry) o;
         return snapshot.equals(entry.snapshot) &&
             state == entry.state &&
             indices.equals(entry.indices) &&
@@ -291,7 +292,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements
             return false;
         }

-        @SuppressWarnings("unchecked") ShardRestoreStatus status = (ShardRestoreStatus) o;
+        ShardRestoreStatus status = (ShardRestoreStatus) o;
         return state == status.state &&
             Objects.equals(nodeId, status.nodeId) &&
             Objects.equals(reason, status.reason);

@@ -161,7 +161,6 @@ public final class IndexGraveyard implements MetaData.Custom {
     }

     @Override
-    @SuppressWarnings("unchecked")
     public Diff<MetaData.Custom> diff(final MetaData.Custom previous) {
         return new IndexGraveyardDiff((IndexGraveyard) previous, this);
     }
@@ -321,7 +320,7 @@ public final class IndexGraveyard implements MetaData.Custom {

         @Override
         public IndexGraveyard apply(final MetaData.Custom previous) {
-            @SuppressWarnings("unchecked") final IndexGraveyard old = (IndexGraveyard) previous;
+            final IndexGraveyard old = (IndexGraveyard) previous;
             if (removedCount > old.tombstones.size()) {
                 throw new IllegalStateException("IndexGraveyardDiff cannot remove [" + removedCount + "] entries from [" +
                     old.tombstones.size() + "] tombstones.");
@@ -416,7 +415,7 @@ public final class IndexGraveyard implements MetaData.Custom {
             if (other == null || getClass() != other.getClass()) {
                 return false;
             }
-            @SuppressWarnings("unchecked") Tombstone that = (Tombstone) other;
+            Tombstone that = (Tombstone) other;
             return index.equals(that.index) && deleteDateInMillis == that.deleteDateInMillis;
         }

@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
 import org.elasticsearch.action.support.ActiveShardCount;
@@ -685,7 +686,6 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragmen
             return lookupPrototypeSafe(key).readFrom(in);
         }

-        @SuppressWarnings("unchecked")
         @Override
         public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
             return lookupPrototypeSafe(key).readDiffFrom(in);

@@ -381,7 +381,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
                 aliases.build(), customs.build());
         }

-        @SuppressWarnings("unchecked")
         public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params)
             throws IOException {
             builder.startObject(indexTemplateMetaData.name());

@@ -22,8 +22,10 @@ package org.elasticsearch.cluster.metadata;
 import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.CollectionUtil;
+import org.elasticsearch.action.AliasesRequest;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ClusterState.FeatureAware;
 import org.elasticsearch.cluster.Diff;
@@ -168,7 +170,6 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To

     private final SortedMap<String, AliasOrIndex> aliasAndIndexLookup;

-    @SuppressWarnings("unchecked")
     MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings,
              ImmutableOpenMap<String, IndexMetaData> indices, ImmutableOpenMap<String, IndexTemplateMetaData> templates,
              ImmutableOpenMap<String, Custom> customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices,
@@ -248,21 +249,53 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
     }

     /**
-     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
-     * that point to the specified concrete indices or match partially with the indices via wildcards.
+     * Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards.
      *
-     * @param aliases The names of the index aliases to find
     * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
     * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
     * present for that index
     */
-    public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, String[] concreteIndices) {
+    public ImmutableOpenMap<String, List<AliasMetaData>> findAllAliases(String[] concreteIndices) {
+        return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices);
+    }
+
+    /**
+     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
+     * that point to the specified concrete indices or match partially with the indices via wildcards.
+     *
+     * @param aliasesRequest The request to find aliases for
+     * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
+     * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
+     * present for that index
+     */
+    public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) {
+        return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices);
+    }
+
+    /**
+     * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
+     * that point to the specified concrete indices or match partially with the indices via wildcards.
+     *
+     * @param aliases The aliases to look for
+     * @param originalAliases The original aliases that the user originally requested
+     * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
+     * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
+     * present for that index
+     */
+    private ImmutableOpenMap<String, List<AliasMetaData>> findAliases(String[] originalAliases, String[] aliases,
+                                                                      String[] concreteIndices) {
         assert aliases != null;
+        assert originalAliases != null;
         assert concreteIndices != null;
         if (concreteIndices.length == 0) {
             return ImmutableOpenMap.of();
         }

+        //if aliases were provided but they got replaced with empty aliases, return empty map
+        if (originalAliases.length > 0 && aliases.length == 0) {
+            return ImmutableOpenMap.of();
+        }
+
         boolean matchAllAliases = matchAllAliases(aliases);
         ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
         for (String index : concreteIndices) {
@@ -967,7 +1000,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
     }

     public IndexGraveyard indexGraveyard() {
-        @SuppressWarnings("unchecked") IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE);
+        IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE);
         return graveyard;
     }

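MetaData now exposes two entry points: findAllAliases(concreteIndices) for callers that want every alias of the given indices, and findAliases(AliasesRequest, concreteIndices), which also sees the aliases as originally requested so that a request whose alias names were resolved away returns an empty map. A minimal caller sketch, assuming a hypothetical alias pattern and an already-built MetaData instance (the helper methods and alias name below are not part of this commit):

import java.util.List;

import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;

class FindAliasesSketch {
    // Every alias that points at the given indices; replaces findAliases(Strings.EMPTY_ARRAY, ...).
    static ImmutableOpenMap<String, List<AliasMetaData>> everyAlias(MetaData metaData, String[] indices) {
        return metaData.findAllAliases(indices);
    }

    // Aliases filtered by what the request asked for; returns an empty map when the request
    // named aliases but name resolution left none to look up.
    static ImmutableOpenMap<String, List<AliasMetaData>> requestedAliases(MetaData metaData, String[] indices) {
        GetAliasesRequest request = new GetAliasesRequest("logs-*"); // hypothetical alias pattern
        return metaData.findAliases(request, indices);
    }
}
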
@@ -217,7 +217,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject {
             return false;
         }

-        @SuppressWarnings("unchecked") SnapshotRecoverySource that = (SnapshotRecoverySource) o;
+        SnapshotRecoverySource that = (SnapshotRecoverySource) o;
         return snapshot.equals(that.snapshot) && index.equals(that.index) && version.equals(that.version);
     }

@@ -175,7 +175,7 @@ public abstract class AbstractAllocationDecision implements ToXContentFragment,
         if (other == null || other instanceof AbstractAllocationDecision == false) {
             return false;
         }
-        @SuppressWarnings("unchecked") AbstractAllocationDecision that = (AbstractAllocationDecision) other;
+        AbstractAllocationDecision that = (AbstractAllocationDecision) other;
         return Objects.equals(targetNode, that.targetNode) && Objects.equals(nodeDecisions, that.nodeDecisions);
     }

@@ -316,7 +316,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision {
         if (other instanceof AllocateUnassignedDecision == false) {
             return false;
         }
-        @SuppressWarnings("unchecked") AllocateUnassignedDecision that = (AllocateUnassignedDecision) other;
+        AllocateUnassignedDecision that = (AllocateUnassignedDecision) other;
         return Objects.equals(allocationStatus, that.allocationStatus)
             && Objects.equals(allocationId, that.allocationId)
             && reuseStore == that.reuseStore

@@ -300,7 +300,7 @@ public final class MoveDecision extends AbstractAllocationDecision {
         if (other instanceof MoveDecision == false) {
             return false;
         }
-        @SuppressWarnings("unchecked") MoveDecision that = (MoveDecision) other;
+        MoveDecision that = (MoveDecision) other;
         return Objects.equals(allocationDecision, that.allocationDecision)
             && Objects.equals(canRemainDecision, that.canRemainDecision)
             && Objects.equals(clusterRebalanceDecision, that.clusterRebalanceDecision)

@@ -54,7 +54,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple
         listeners.remove(listener);
     }

-    @SuppressWarnings({"unchecked"})
     @Override
     public void start() {
         if (!lifecycle.canMoveToStarted()) {
@@ -72,7 +71,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple

     protected abstract void doStart();

-    @SuppressWarnings({"unchecked"})
     @Override
     public void stop() {
         if (!lifecycle.canMoveToStopped()) {

@@ -32,7 +32,6 @@ class ConstructorInjectorStore {
     private final FailableCache<TypeLiteral<?>, ConstructorInjector<?>> cache
         = new FailableCache<TypeLiteral<?>, ConstructorInjector<?>>() {
             @Override
-            @SuppressWarnings("unchecked")
             protected ConstructorInjector<?> create(TypeLiteral<?> type, Errors errors)
                 throws ErrorsException {
                 return createConstructor(type, errors);

@@ -101,7 +101,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor {
             },
             new TypeConverter() {
                 @Override
-                @SuppressWarnings("unchecked")
                 public Object convert(String value, TypeLiteral<?> toType) {
                     try {
                         return Class.forName(value);
@@ -128,7 +127,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor {

         TypeConverter typeConverter = new TypeConverter() {
             @Override
-            @SuppressWarnings("unchecked")
             public Object convert(String value, TypeLiteral<?> toType) {
                 try {
                     return parser.invoke(null, value);

@@ -42,7 +42,6 @@ class AssistedConstructor<T> {
     private final ParameterListKey assistedParameters;
     private final List<Parameter> allParameters;

-    @SuppressWarnings("unchecked")
     AssistedConstructor(Constructor<T> constructor, List<TypeLiteral<?>> parameterTypes) {
         this.constructor = constructor;

@@ -97,7 +97,7 @@ public class ProviderMethod<T> implements ProviderWithDependencies<T> {

         try {
             // We know this cast is safe because T is the method's return type.
-            @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
+            @SuppressWarnings({"unchecked"})
             T result = (T) method.invoke(instance, parameters);
             return result;
         } catch (IllegalAccessException e) {

@ -220,7 +220,6 @@ public abstract class Multibinder<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@SuppressWarnings("unchecked")
|
|
||||||
public void configure(Binder binder) {
|
public void configure(Binder binder) {
|
||||||
checkConfiguration(!isInitialized(), "Multibinder was already initialized");
|
checkConfiguration(!isInitialized(), "Multibinder was already initialized");
|
||||||
|
|
||||||
|
|
|
@ -78,8 +78,7 @@ public abstract class DefaultBindingTargetVisitor<T, V> implements BindingTarget
|
||||||
|
|
||||||
// javac says it's an error to cast ProviderBinding<? extends T> to Binding<? extends T>
|
// javac says it's an error to cast ProviderBinding<? extends T> to Binding<? extends T>
|
||||||
@Override
|
@Override
|
||||||
@SuppressWarnings("unchecked")
|
|
||||||
public V visit(ProviderBinding<? extends T> providerBinding) {
|
public V visit(ProviderBinding<? extends T> providerBinding) {
|
||||||
return visitOther((Binding<? extends T>) providerBinding);
|
return visitOther(providerBinding);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -518,7 +518,6 @@ public abstract class StreamInput extends InputStream {
         return (Map<String, Object>) readGenericValue();
     }
 
-    @SuppressWarnings({"unchecked"})
    @Nullable
    public Object readGenericValue() throws IOException {
        byte type = readByte();
@@ -178,7 +178,6 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
     /**
      * The source of the document (As a map).
      */
-    @SuppressWarnings({"unchecked"})
    public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
        if (source == null) {
            return null;
@@ -197,7 +197,6 @@ public class DocumentMapper implements ToXContentFragment {
        return mapping.root;
    }
 
-    @SuppressWarnings({"unchecked"})
    public <T extends MetadataFieldMapper> T metadataMapper(Class<T> type) {
        return mapping.metadataMapper(type);
    }
@@ -427,7 +427,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
                // the master node restoring mappings from disk or data nodes
                // deserializing cluster state that was sent by the master node,
                // this check will be skipped.
-                checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size());
+                checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() + fieldAliasMappers.size());
            }
 
            results.put(newMapper.type(), newMapper);
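Note: the MapperService hunk above makes field alias mappers count toward the index's total-fields limit, alongside object and concrete field mappers. A minimal, self-contained sketch of that accounting follows; the class name, method signature, and exception message are illustrative stand-ins, not the actual MapperService internals.

import java.util.Map;

// Hypothetical illustration only: aliases are counted like concrete fields and objects
// when comparing against the configured total-fields limit.
final class TotalFieldsLimitSketch {
    static void checkTotalFieldsLimit(Map<String, ?> objectMappers,
                                      Map<String, ?> fieldMappers,
                                      Map<String, ?> fieldAliasMappers,
                                      long limit) {
        final long totalFields = objectMappers.size() + fieldMappers.size() + fieldAliasMappers.size();
        if (totalFields > limit) {
            throw new IllegalArgumentException("Limit of total fields [" + limit + "] has been exceeded");
        }
    }
}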
@@ -230,7 +230,9 @@ public class TypeParsers {
            } else {
                throw new MapperParsingException("no type specified for property [" + multiFieldName + "]");
            }
-            if (type.equals(ObjectMapper.CONTENT_TYPE) || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) {
+            if (type.equals(ObjectMapper.CONTENT_TYPE)
+                    || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)
+                    || type.equals(FieldAliasMapper.CONTENT_TYPE)) {
                throw new MapperParsingException("Type [" + type + "] cannot be used in multi field");
            }
 
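Note: the TypeParsers change extends the multi-field guard so that field aliases are rejected alongside object and nested mappers. A rough standalone sketch of that check follows; the literal type names are assumptions standing in for ObjectMapper.CONTENT_TYPE, ObjectMapper.NESTED_CONTENT_TYPE, and FieldAliasMapper.CONTENT_TYPE.

// Hypothetical guard: reject mapper types that cannot appear inside a multi field.
final class MultiFieldTypeGuardSketch {
    static void ensureAllowedInMultiField(String type) {
        // "object", "nested", and "alias" are assumed values of the CONTENT_TYPE constants.
        if (type.equals("object") || type.equals("nested") || type.equals("alias")) {
            throw new IllegalArgumentException("Type [" + type + "] cannot be used in multi field");
        }
    }
}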
@@ -37,6 +37,7 @@ import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
 
 /**
  * CircuitBreakerService that attempts to redistribute space between breakers
@@ -215,7 +216,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
        }
        // Manually add the parent breaker settings since they aren't part of the breaker map
        allStats.add(new CircuitBreakerStats(CircuitBreaker.PARENT, parentSettings.getLimit(),
-            parentUsed(0L), 1.0, parentTripCount.get()));
+            parentUsed(0L).totalUsage, 1.0, parentTripCount.get()));
        return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()]));
    }
 
@@ -225,15 +226,26 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
        return new CircuitBreakerStats(breaker.getName(), breaker.getLimit(), breaker.getUsed(), breaker.getOverhead(), breaker.getTrippedCount());
    }
 
-    private long parentUsed(long newBytesReserved) {
+    private static class ParentMemoryUsage {
+        final long baseUsage;
+        final long totalUsage;
+
+        ParentMemoryUsage(final long baseUsage, final long totalUsage) {
+            this.baseUsage = baseUsage;
+            this.totalUsage = totalUsage;
+        }
+    }
+
+    private ParentMemoryUsage parentUsed(long newBytesReserved) {
        if (this.trackRealMemoryUsage) {
-            return currentMemoryUsage() + newBytesReserved;
+            final long current = currentMemoryUsage();
+            return new ParentMemoryUsage(current, current + newBytesReserved);
        } else {
            long parentEstimated = 0;
            for (CircuitBreaker breaker : this.breakers.values()) {
                parentEstimated += breaker.getUsed() * breaker.getOverhead();
            }
-            return parentEstimated;
+            return new ParentMemoryUsage(parentEstimated, parentEstimated);
        }
    }
 
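Note: the parentUsed() rewrite above replaces a single long return value with a small holder so that both the usage before the new reservation and the usage including it can be reported. A self-contained sketch of that idea, assuming nothing beyond what the hunk shows (names are illustrative):

// Sketch of the two-field accounting introduced above.
final class ParentMemoryUsageSketch {
    final long baseUsage;   // usage before the new reservation (real-memory mode)
    final long totalUsage;  // usage including the bytes about to be reserved

    ParentMemoryUsageSketch(long baseUsage, long totalUsage) {
        this.baseUsage = baseUsage;
        this.totalUsage = totalUsage;
    }

    // Real-memory tracking: base is the current JVM usage, total adds the reservation.
    static ParentMemoryUsageSketch realMemory(long currentUsage, long newBytesReserved) {
        return new ParentMemoryUsageSketch(currentUsage, currentUsage + newBytesReserved);
    }

    // Estimated mode: both fields carry the per-breaker estimate, as in the else branch above.
    static ParentMemoryUsageSketch estimated(long estimatedUsage) {
        return new ParentMemoryUsageSketch(estimatedUsage, estimatedUsage);
    }
}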
@@ -246,15 +258,37 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
     * Checks whether the parent breaker has been tripped
     */
    public void checkParentLimit(long newBytesReserved, String label) throws CircuitBreakingException {
-        long totalUsed = parentUsed(newBytesReserved);
+        final ParentMemoryUsage parentUsed = parentUsed(newBytesReserved);
        long parentLimit = this.parentSettings.getLimit();
-        if (totalUsed > parentLimit) {
+        if (parentUsed.totalUsage > parentLimit) {
            this.parentTripCount.incrementAndGet();
-            final String message = "[parent] Data too large, data for [" + label + "]" +
+            final StringBuilder message = new StringBuilder("[parent] Data too large, data for [" + label + "]" +
-                " would be [" + totalUsed + "/" + new ByteSizeValue(totalUsed) + "]" +
+                " would be [" + parentUsed.totalUsage + "/" + new ByteSizeValue(parentUsed.totalUsage) + "]" +
                ", which is larger than the limit of [" +
-                parentLimit + "/" + new ByteSizeValue(parentLimit) + "]";
+                parentLimit + "/" + new ByteSizeValue(parentLimit) + "]");
-            throw new CircuitBreakingException(message, totalUsed, parentLimit);
+            if (this.trackRealMemoryUsage) {
+                final long realUsage = parentUsed.baseUsage;
+                message.append(", real usage: [");
+                message.append(realUsage);
+                message.append("/");
+                message.append(new ByteSizeValue(realUsage));
+                message.append("], new bytes reserved: [");
+                message.append(newBytesReserved);
+                message.append("/");
+                message.append(new ByteSizeValue(newBytesReserved));
+                message.append("]");
+            } else {
+                message.append(", usages [");
+                message.append(String.join(", ",
+                    this.breakers.entrySet().stream().map(e -> {
+                        final CircuitBreaker breaker = e.getValue();
+                        final long breakerUsed = (long)(breaker.getUsed() * breaker.getOverhead());
+                        return e.getKey() + "=" + breakerUsed + "/" + new ByteSizeValue(breakerUsed);
+                    })
+                    .collect(Collectors.toList())));
+                message.append("]");
+            }
+            throw new CircuitBreakingException(message.toString(), parentUsed.totalUsage, parentLimit);
        }
    }
 
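Note: when the parent breaker trips, the message above now appends either the real memory usage plus the newly reserved bytes, or, in estimated mode, a per-breaker usage summary. A hedged sketch of how such a summary fragment could be assembled; the map, names, and plain-byte formatting are illustrative, while the real code walks this.breakers and renders sizes with ByteSizeValue.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

// Illustrative only: builds a ", usages [name=bytes, ...]" fragment like the one appended above.
final class BreakerUsageSummarySketch {
    static String usagesFragment(Map<String, Long> breakerUsedBytes) {
        return breakerUsedBytes.entrySet().stream()
            .map(e -> e.getKey() + "=" + e.getValue())
            .collect(Collectors.joining(", ", ", usages [", "]"));
    }

    public static void main(String[] args) {
        final Map<String, Long> used = new LinkedHashMap<>();
        used.put("request", 0L);
        used.put("fielddata", 1024L);
        System.out.println(usagesFragment(used));  // prints: , usages [request=0, fielddata=1024]
    }
}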
@@ -45,7 +45,6 @@ public class NodePersistentTasksExecutor {
                    task.markAsFailed(e);
                }
 
-                @SuppressWarnings("unchecked")
                @Override
                protected void doRun() throws Exception {
                    try {
Some files were not shown because too many files have changed in this diff.