Merge branch 'master' into index-lifecycle
commit 5f696e15ea
@ -222,7 +222,11 @@ class BuildPlugin implements Plugin<Project> {
             // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
             return Jvm.current().javaHome
         } else {
-            throw new GradleException("JAVA_HOME must be set to build Elasticsearch")
+            throw new GradleException(
+                    "JAVA_HOME must be set to build Elasticsearch. " +
+                            "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
+                            "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
+            )
         }
     }
     return javaHome

@ -40,7 +40,7 @@ import java.util.Map;
|
|||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
|
||||
import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
|
||||
|
||||
public class RankEvalIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
|
@ -84,7 +84,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
|
|||
Map<String, EvalQueryQuality> partialResults = response.getPartialResults();
|
||||
assertEquals(2, partialResults.size());
|
||||
EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query");
|
||||
assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
|
||||
assertEquals(2, filterUnratedDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
|
||||
List<RatedSearchHit> hitsAndRatings = amsterdamQueryQuality.getHitsAndRatings();
|
||||
assertEquals(7, hitsAndRatings.size());
|
||||
for (RatedSearchHit hit : hitsAndRatings) {
|
||||
|
@ -96,7 +96,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
|
|||
}
|
||||
}
|
||||
EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query");
|
||||
assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size());
|
||||
assertEquals(6, filterUnratedDocuments(berlinQueryQuality.getHitsAndRatings()).size());
|
||||
hitsAndRatings = berlinQueryQuality.getHitsAndRatings();
|
||||
assertEquals(7, hitsAndRatings.size());
|
||||
for (RatedSearchHit hit : hitsAndRatings) {
|
||||
|
|
|
@ -724,8 +724,8 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
assertEquals(0, method.getExceptionTypes().length);
|
||||
assertEquals(3, method.getParameterTypes().length);
|
||||
assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request"));
|
||||
assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName()));
|
||||
assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName()));
|
||||
assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class));
|
||||
assertThat(method.getParameterTypes()[2], equalTo(ActionListener.class));
|
||||
} else {
|
||||
//A few methods return a boolean rather than a response object
|
||||
if (apiName.equals("ping") || apiName.contains("exist")) {
|
||||
|
@ -738,18 +738,23 @@ public class RestHighLevelClientTests extends ESTestCase {
|
|||
//a few methods don't accept a request object as argument
|
||||
if (apiName.equals("ping") || apiName.equals("info")) {
|
||||
assertEquals(1, method.getParameterTypes().length);
|
||||
assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName()));
|
||||
assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class));
|
||||
} else {
|
||||
assertEquals(apiName, 2, method.getParameterTypes().length);
|
||||
assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request"));
|
||||
assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName()));
|
||||
assertThat(method.getParameterTypes()[1], equalTo(RequestOptions.class));
|
||||
}
|
||||
|
||||
boolean remove = apiSpec.remove(apiName);
|
||||
if (remove == false && deprecatedMethods.contains(apiName) == false) {
|
||||
//TODO xpack api are currently ignored, we need to load xpack yaml spec too
|
||||
if (apiName.startsWith("xpack.") == false) {
|
||||
apiNotFound.add(apiName);
|
||||
if (remove == false) {
|
||||
if (deprecatedMethods.contains(apiName)) {
|
||||
assertTrue("method [" + method.getName() + "], api [" + apiName + "] should be deprecated",
|
||||
method.isAnnotationPresent(Deprecated.class));
|
||||
} else {
|
||||
//TODO xpack api are currently ignored, we need to load xpack yaml spec too
|
||||
if (apiName.startsWith("xpack.") == false) {
|
||||
apiNotFound.add(apiName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.transport.client;
|
|||
|
||||
import io.netty.util.ThreadDeathWatcher;
|
||||
import io.netty.util.concurrent.GlobalEventExecutor;
|
||||
|
||||
import org.elasticsearch.client.transport.TransportClient;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
|
|
|
@ -49,7 +49,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os
|
|||
return copySpec {
|
||||
into("elasticsearch-${version}") {
|
||||
into('lib') {
|
||||
with libFiles
|
||||
with libFiles(oss)
|
||||
}
|
||||
into('config') {
|
||||
dirMode 0750
|
||||
|
|
|
@ -227,16 +227,24 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
|
|||
/*****************************************************************************
|
||||
* Common files in all distributions *
|
||||
*****************************************************************************/
|
||||
libFiles = copySpec {
|
||||
// delay by using closures, since they have not yet been configured, so no jar task exists yet
|
||||
from { project(':server').jar }
|
||||
from { project(':server').configurations.runtime }
|
||||
from { project(':libs:plugin-classloader').jar }
|
||||
from { project(':distribution:tools:java-version-checker').jar }
|
||||
from { project(':distribution:tools:launchers').jar }
|
||||
into('tools/plugin-cli') {
|
||||
from { project(':distribution:tools:plugin-cli').jar }
|
||||
from { project(':distribution:tools:plugin-cli').configurations.runtime }
|
||||
libFiles = { oss ->
|
||||
copySpec {
|
||||
// delay by using closures, since they have not yet been configured, so no jar task exists yet
|
||||
from { project(':server').jar }
|
||||
from { project(':server').configurations.runtime }
|
||||
from { project(':libs:plugin-classloader').jar }
|
||||
from { project(':distribution:tools:java-version-checker').jar }
|
||||
from { project(':distribution:tools:launchers').jar }
|
||||
into('tools/plugin-cli') {
|
||||
from { project(':distribution:tools:plugin-cli').jar }
|
||||
from { project(':distribution:tools:plugin-cli').configurations.runtime }
|
||||
}
|
||||
if (oss == false) {
|
||||
into('tools/security-cli') {
|
||||
from { project(':x-pack:plugin:security:cli').jar }
|
||||
from { project(':x-pack:plugin:security:cli').configurations.compile }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -126,7 +126,7 @@ Closure commonPackageConfig(String type, boolean oss) {
|
|||
}
|
||||
into('lib') {
|
||||
with copySpec {
|
||||
with libFiles
|
||||
with libFiles(oss)
|
||||
// we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
|
||||
eachFile { FileCopyDetails fcp ->
|
||||
String[] segments = fcp.relativePath.segments
|
||||
|
|
|
@ -26,14 +26,14 @@ include::install_remove.asciidoc[]
| `field` | yes | - | The field to get the ip address from for the geographical lookup.
| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database.
| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb, GeoLite2-Country.mmdb and GeoLite2-ASN.mmdb files.
-| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
+| `properties` | no | [`continent_name`, `country_iso_code`, `region_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document
|======

*Depends on what is available in `database_field`:

* If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`,
-`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
+`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`.
* If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`,
`country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties

@ -1732,6 +1732,10 @@ For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED`
| `include_keys` | no | `null` | List of keys to filter and insert into document. Defaults to including all keys
| `exclude_keys` | no | `null` | List of keys to exclude from document
| `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document
+| `prefix` | no | `null` | Prefix to be added to extracted keys
+| `trim_key` | no | `null` | String of characters to trim from extracted keys
+| `trim_value` | no | `null` | String of characters to trim from extracted values
+| `strip_brackets` | no | `false` | If `true` strip brackets `()`, `<>`, `[]` as well as quotes `'` and `"` from extracted values
|======

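A minimal sketch of how these new options combine, written in the style of the KeyValueProcessorTests added further down in this commit. The field name "message", the target "parsed", the tag "tag" and the sample value are illustrative assumptions, not part of the commit:

    import java.util.HashMap;
    import java.util.Map;
    import org.elasticsearch.ingest.Processor;
    import org.elasticsearch.ingest.common.KeyValueProcessor;

    static void kvSketch() throws Exception {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "message");          // assumed source field
        config.put("field_split", "&");
        config.put("value_split", "=");
        config.put("target_field", "parsed");    // assumed target field
        config.put("trim_key", " ");             // new: strip spaces around extracted keys
        config.put("trim_value", " ");           // new: strip spaces around extracted values
        config.put("strip_brackets", true);      // new: drop (), <>, [] and quotes from values
        config.put("prefix", "arg_");            // new: prepended to every extracted key
        Processor processor = new KeyValueProcessor.Factory().create(null, "tag", config);
        // For a document where message = "first=<hello>& second =world", executing this
        // processor would yield parsed.arg_first = "hello" and parsed.arg_second = "world".
    }
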
@ -74,7 +74,7 @@ field alias to query over multiple target fields in a single clause.
==== Unsupported APIs

Writes to field aliases are not supported: attempting to use an alias in an index or update request
-will result in a failure. Likewise, aliases cannot be used as the target of `copy_to`.
+will result in a failure. Likewise, aliases cannot be used as the target of `copy_to` or in multi-fields.

Because alias names are not present in the document source, aliases cannot be used when performing
source filtering. For example, the following request will return an empty result for `_source`:

@ -79,3 +79,11 @@ the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in
==== The deprecated stored script contexts have now been removed
When putting stored scripts, support for storing them with the deprecated `template` context or without a context is
now removed. Scripts must be stored using the `script` context as mentioned in the documentation.
+
+==== Get Aliases API limitations when {security} is enabled removed
+
+The behavior and response codes of the get aliases API no longer vary
+depending on whether {security} is enabled. Previously a
+404 - NOT FOUND (IndexNotFoundException) could be returned in case the
+current user was not authorized for any alias. An empty response with
+status 200 - OK is now returned instead at all times.

@ -274,7 +274,7 @@ that shows potential errors of individual queries. The response has the followin
    "details": {
        "my_query_id1": { <2>
            "quality_level": 0.6, <3>
-           "unknown_docs": [ <4>
+           "unrated_docs": [ <4>
            {
                "_index": "my_index",
                "_id": "1960795"

@ -309,7 +309,7 @@ that shows potential errors of individual queries. The response has the followin
<1> the overall evaluation quality calculated by the defined metric
<2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id
<3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score
-<4> the `unknown_docs` section contains an `_index` and `_id` entry for each document in the search result for this
+<4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for this
query that didn't have a ratings value. This can be used to ask the user to supply ratings for these documents
<5> the `hits` section shows a grouping of the search results with their supplied rating
<6> the `metric_details` give additional information about the calculated quality metric (e.g. how many of the retrieved

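The same rename shows up on the Java side in the rank evaluation classes touched by this commit (see RankEvalIT and EvaluationMetric elsewhere in the diff). A small sketch of reading the `details` entry shown above; obtaining the RankEvalResponse (for example from the high-level REST client) is assumed and not shown, and "my_query_id1" simply mirrors the documentation example:

    import java.util.List;
    import java.util.Map;

    static void inspectDetails(RankEvalResponse response) {
        Map<String, EvalQueryQuality> partialResults = response.getPartialResults();
        EvalQueryQuality queryQuality = partialResults.get("my_query_id1");
        // the "hits" section: search hits together with their supplied ratings
        List<RatedSearchHit> hitsAndRatings = queryQuality.getHitsAndRatings();
        // the "unrated_docs" section: hits that came back without a rating
        // (this helper was renamed from filterUnknownDocuments in this commit)
        List<DocumentKey> unratedDocs = EvaluationMetric.filterUnratedDocuments(hitsAndRatings);
    }
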
@ -85,10 +85,6 @@ You can update this setting through the
|
|||
|
||||
Sets the timeout for collecting index statistics. Defaults to `10s`.
|
||||
|
||||
`xpack.monitoring.collection.indices.stats.timeout`::
|
||||
|
||||
Sets the timeout for collecting total indices statistics. Defaults to `10s`.
|
||||
|
||||
`xpack.monitoring.collection.index.recovery.active_only`::
|
||||
|
||||
Controls whether or not all recoveries are collected. Set to `true` to
|
||||
|
|
|
@ -47,6 +47,8 @@ include::set-paths-tip.asciidoc[]
|
|||
Use the `elasticsearch-plugin` script to install the upgraded version of each
|
||||
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
|
||||
a node.
|
||||
+
|
||||
include::remove-xpack.asciidoc[]
|
||||
|
||||
. *Start each upgraded node.*
|
||||
+
|
||||
|
@ -91,7 +93,7 @@ already have local shard copies.
|
|||
+
|
||||
--
|
||||
When all nodes have joined the cluster and recovered their primary shards,
|
||||
reenable allocation by restoring `cluster.routing.allocation.enable` to its
|
||||
reenable allocation by restoring `cluster.routing.allocation.enable` to its
|
||||
default:
|
||||
|
||||
[source,js]
|
||||
|
@ -123,4 +125,4 @@ GET _cat/recovery
|
|||
// CONSOLE
|
||||
--
|
||||
|
||||
. *Restart machine learning jobs.*
|
||||
. *Restart machine learning jobs.*
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3,
|
||||
remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As
|
||||
of 6.3, {xpack} is included in the default distribution. The node will fail to
|
||||
start if the old {xpack} plugin is present.
|
|
@ -53,6 +53,8 @@ include::set-paths-tip.asciidoc[]
|
|||
Use the `elasticsearch-plugin` script to install the upgraded version of each
|
||||
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
|
||||
a node.
|
||||
+
|
||||
include::remove-xpack.asciidoc[]
|
||||
|
||||
. *Start the upgraded node.*
|
||||
+
|
||||
|
@ -144,7 +146,7 @@ for each node that needs to be updated.
|
|||
|
||||
--
|
||||
|
||||
. *Restart machine learning jobs.*
|
||||
. *Restart machine learning jobs.*
|
||||
|
||||
[IMPORTANT]
|
||||
====================================================
|
||||
|
|
|
@ -137,7 +137,6 @@ public class ChannelFactoryTests extends ESTestCase {
|
|||
super(rawChannelFactory);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public NioSocketChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
|
||||
NioSocketChannel nioSocketChannel = new NioSocketChannel(channel);
|
||||
|
|
|
@ -120,7 +120,6 @@ public class EventHandlerTests extends ESTestCase {
|
|||
verify(channelFactory, times(2)).acceptNioChannel(same(serverContext), same(selectorSupplier));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testHandleAcceptCallsServerAcceptCallback() throws IOException {
|
||||
NioSocketChannel childChannel = new NioSocketChannel(mock(SocketChannel.class));
|
||||
SocketChannelContext childContext = mock(SocketChannelContext.class);
|
||||
|
|
|
@ -275,7 +275,6 @@ public class SocketChannelContextTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCloseClosesChannelBuffer() throws IOException {
|
||||
try (SocketChannel realChannel = SocketChannel.open()) {
|
||||
when(channel.getRawChannel()).thenReturn(realChannel);
|
||||
|
|
|
@ -43,7 +43,6 @@ class MultiPassStats {
|
|||
this.fieldBKey = fieldBName;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
void computeStats(final List<Double> fieldA, final List<Double> fieldB) {
|
||||
// set count
|
||||
count = fieldA.size();
|
||||
|
|
|
@ -20,11 +20,17 @@
|
|||
esplugin {
|
||||
description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources'
|
||||
classname 'org.elasticsearch.ingest.common.IngestCommonPlugin'
|
||||
extendedPlugins = ['lang-painless']
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compileOnly project(':modules:lang-painless')
|
||||
compile project(':libs:grok')
|
||||
}
|
||||
|
||||
compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
|
||||
compileTestJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
|
||||
|
||||
integTestCluster {
|
||||
module project(':modules:lang-painless')
|
||||
}
|
|
@ -35,9 +35,13 @@ public final class BytesProcessor extends AbstractStringProcessor {
|
|||
super(processorTag, field, ignoreMissing, targetField);
|
||||
}
|
||||
|
||||
public static long apply(String value) {
|
||||
return ByteSizeValue.parseBytesSizeValue(value, null, "Ingest Field").getBytes();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Long process(String value) {
|
||||
return ByteSizeValue.parseBytesSizeValue(value, null, getField()).getBytes();
|
||||
return apply(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -67,13 +67,11 @@ public final class JsonProcessor extends AbstractProcessor {
|
|||
return addToRoot;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute(IngestDocument document) throws Exception {
|
||||
Object fieldValue = document.getFieldValue(field, Object.class);
|
||||
BytesReference bytesRef = (fieldValue == null) ? new BytesArray("null") : new BytesArray(fieldValue.toString());
|
||||
public static Object apply(Object fieldValue) {
|
||||
BytesReference bytesRef = fieldValue == null ? new BytesArray("null") : new BytesArray(fieldValue.toString());
|
||||
try (InputStream stream = bytesRef.streamInput();
|
||||
XContentParser parser = JsonXContent.jsonXContent
|
||||
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) {
|
||||
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) {
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
Object value = null;
|
||||
if (token == XContentParser.Token.VALUE_NULL) {
|
||||
|
@ -91,20 +89,32 @@ public final class JsonProcessor extends AbstractProcessor {
|
|||
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
|
||||
throw new IllegalArgumentException("cannot read binary value");
|
||||
}
|
||||
if (addToRoot && (value instanceof Map)) {
|
||||
for (Map.Entry<String, Object> entry : ((Map<String, Object>) value).entrySet()) {
|
||||
document.setFieldValue(entry.getKey(), entry.getValue());
|
||||
}
|
||||
} else if (addToRoot) {
|
||||
throw new IllegalArgumentException("cannot add non-map fields to root of document");
|
||||
} else {
|
||||
document.setFieldValue(targetField, value);
|
||||
}
|
||||
return value;
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public static void apply(Map<String, Object> ctx, String fieldName) {
|
||||
Object value = apply(ctx.get(fieldName));
|
||||
if (value instanceof Map) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<String, Object> map = (Map<String, Object>) value;
|
||||
ctx.putAll(map);
|
||||
} else {
|
||||
throw new IllegalArgumentException("cannot add non-map fields to root of document");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute(IngestDocument document) throws Exception {
|
||||
if (addToRoot) {
|
||||
apply(document.getSourceAndMetadata(), field);
|
||||
} else {
|
||||
document.setFieldValue(targetField, apply(document.getFieldValue(field, Object.class)));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return TYPE;
|
||||
|
|
|
@ -25,11 +25,14 @@ import org.elasticsearch.ingest.ConfigurationUtils;
|
|||
import org.elasticsearch.ingest.IngestDocument;
|
||||
import org.elasticsearch.ingest.Processor;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* The KeyValueProcessor parses and extracts messages of the `key=value` variety into fields with values of the keys.
|
||||
|
@ -38,6 +41,8 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
|
||||
public static final String TYPE = "kv";
|
||||
|
||||
private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)");
|
||||
|
||||
private final String field;
|
||||
private final String fieldSplit;
|
||||
private final String valueSplit;
|
||||
|
@ -45,9 +50,11 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
private final Set<String> excludeKeys;
|
||||
private final String targetField;
|
||||
private final boolean ignoreMissing;
|
||||
private final Consumer<IngestDocument> execution;
|
||||
|
||||
KeyValueProcessor(String tag, String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
|
||||
Set<String> excludeKeys, String targetField, boolean ignoreMissing) {
|
||||
Set<String> excludeKeys, String targetField, boolean ignoreMissing,
|
||||
String trimKey, String trimValue, boolean stripBrackets, String prefix) {
|
||||
super(tag);
|
||||
this.field = field;
|
||||
this.targetField = targetField;
|
||||
|
@ -56,6 +63,92 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
this.includeKeys = includeKeys;
|
||||
this.excludeKeys = excludeKeys;
|
||||
this.ignoreMissing = ignoreMissing;
|
||||
this.execution = buildExecution(
|
||||
fieldSplit, valueSplit, field, includeKeys, excludeKeys, targetField, ignoreMissing, trimKey, trimValue,
|
||||
stripBrackets, prefix
|
||||
);
|
||||
}
|
||||
|
||||
private static Consumer<IngestDocument> buildExecution(String fieldSplit, String valueSplit, String field,
|
||||
Set<String> includeKeys, Set<String> excludeKeys,
|
||||
String targetField, boolean ignoreMissing,
|
||||
String trimKey, String trimValue, boolean stripBrackets,
|
||||
String prefix) {
|
||||
final Predicate<String> keyFilter;
|
||||
if (includeKeys == null) {
|
||||
if (excludeKeys == null) {
|
||||
keyFilter = key -> true;
|
||||
} else {
|
||||
keyFilter = key -> excludeKeys.contains(key) == false;
|
||||
}
|
||||
} else {
|
||||
if (excludeKeys == null) {
|
||||
keyFilter = includeKeys::contains;
|
||||
} else {
|
||||
keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false;
|
||||
}
|
||||
}
|
||||
final String fieldPathPrefix;
|
||||
String keyPrefix = prefix == null ? "" : prefix;
|
||||
if (targetField == null) {
|
||||
fieldPathPrefix = keyPrefix;
|
||||
} else {
|
||||
fieldPathPrefix = targetField + "." + keyPrefix;
|
||||
}
|
||||
final Function<String, String> keyPrefixer;
|
||||
if (fieldPathPrefix.isEmpty()) {
|
||||
keyPrefixer = val -> val;
|
||||
} else {
|
||||
keyPrefixer = val -> fieldPathPrefix + val;
|
||||
}
|
||||
final Function<String, String[]> fieldSplitter = buildSplitter(fieldSplit, true);
|
||||
Function<String, String[]> valueSplitter = buildSplitter(valueSplit, false);
|
||||
final Function<String, String> keyTrimmer = buildTrimmer(trimKey);
|
||||
final Function<String, String> bracketStrip;
|
||||
if (stripBrackets) {
|
||||
bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll("");
|
||||
} else {
|
||||
bracketStrip = val -> val;
|
||||
}
|
||||
final Function<String, String> valueTrimmer = buildTrimmer(trimValue);
|
||||
return document -> {
|
||||
String value = document.getFieldValue(field, String.class, ignoreMissing);
|
||||
if (value == null) {
|
||||
if (ignoreMissing) {
|
||||
return;
|
||||
}
|
||||
throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
|
||||
}
|
||||
for (String part : fieldSplitter.apply(value)) {
|
||||
String[] kv = valueSplitter.apply(part);
|
||||
if (kv.length != 2) {
|
||||
throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
|
||||
}
|
||||
String key = keyTrimmer.apply(kv[0]);
|
||||
if (keyFilter.test(key)) {
|
||||
append(document, keyPrefixer.apply(key), valueTrimmer.apply(bracketStrip.apply(kv[1])));
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static Function<String, String> buildTrimmer(String trim) {
|
||||
if (trim == null) {
|
||||
return val -> val;
|
||||
} else {
|
||||
Pattern pattern = Pattern.compile("(^([" + trim + "]+))|([" + trim + "]+$)");
|
||||
return val -> pattern.matcher(val).replaceAll("");
|
||||
}
|
||||
}
|
||||
|
||||
private static Function<String, String[]> buildSplitter(String split, boolean fields) {
|
||||
int limit = fields ? 0 : 2;
|
||||
if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') {
|
||||
Pattern splitPattern = Pattern.compile(split);
|
||||
return val -> splitPattern.split(val, limit);
|
||||
} else {
|
||||
return val -> val.split(split, limit);
|
||||
}
|
||||
}
|
||||
|
||||
String getField() {
|
||||
|
@ -86,7 +179,7 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
return ignoreMissing;
|
||||
}
|
||||
|
||||
public void append(IngestDocument document, String targetField, String value) {
|
||||
private static void append(IngestDocument document, String targetField, String value) {
|
||||
if (document.hasField(targetField)) {
|
||||
document.appendFieldValue(targetField, value);
|
||||
} else {
|
||||
|
@ -96,27 +189,7 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
|
||||
@Override
|
||||
public void execute(IngestDocument document) {
|
||||
String oldVal = document.getFieldValue(field, String.class, ignoreMissing);
|
||||
|
||||
if (oldVal == null && ignoreMissing) {
|
||||
return;
|
||||
} else if (oldVal == null) {
|
||||
throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
|
||||
}
|
||||
|
||||
String fieldPathPrefix = (targetField == null) ? "" : targetField + ".";
|
||||
Arrays.stream(oldVal.split(fieldSplit))
|
||||
.map((f) -> {
|
||||
String[] kv = f.split(valueSplit, 2);
|
||||
if (kv.length != 2) {
|
||||
throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
|
||||
}
|
||||
return kv;
|
||||
})
|
||||
.filter((p) ->
|
||||
(includeKeys == null || includeKeys.contains(p[0])) &&
|
||||
(excludeKeys == null || excludeKeys.contains(p[0]) == false))
|
||||
.forEach((p) -> append(document, fieldPathPrefix + p[0], p[1]));
|
||||
execution.accept(document);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -132,6 +205,11 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field");
|
||||
String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split");
|
||||
String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split");
|
||||
String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key");
|
||||
String trimValue = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_value");
|
||||
String prefix = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "prefix");
|
||||
boolean stripBrackets =
|
||||
ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false);
|
||||
Set<String> includeKeys = null;
|
||||
Set<String> excludeKeys = null;
|
||||
List<String> includeKeysList = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys");
|
||||
|
@ -143,7 +221,10 @@ public final class KeyValueProcessor extends AbstractProcessor {
|
|||
excludeKeys = Collections.unmodifiableSet(Sets.newHashSet(excludeKeysList));
|
||||
}
|
||||
boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
|
||||
return new KeyValueProcessor(processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing);
|
||||
return new KeyValueProcessor(
|
||||
processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing,
|
||||
trimKey, trimValue, stripBrackets, prefix
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,9 +35,13 @@ public final class LowercaseProcessor extends AbstractStringProcessor {
|
|||
super(processorTag, field, ignoreMissing, targetField);
|
||||
}
|
||||
|
||||
public static String apply(String value) {
|
||||
return value.toLowerCase(Locale.ROOT);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String process(String value) {
|
||||
return value.toLowerCase(Locale.ROOT);
|
||||
return apply(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -17,23 +17,33 @@
 * under the License.
 */

-package org.elasticsearch.index.settings;
+package org.elasticsearch.ingest.common;

-import org.elasticsearch.common.inject.BindingAnnotation;
+import java.util.Map;

-import java.lang.annotation.Documented;
-import java.lang.annotation.Retention;
-import java.lang.annotation.Target;
+public final class Processors {
+
-import static java.lang.annotation.ElementType.FIELD;
-import static java.lang.annotation.ElementType.PARAMETER;
-import static java.lang.annotation.RetentionPolicy.RUNTIME;
+    public static long bytes(String value) {
+        return BytesProcessor.apply(value);
+    }
+
+    public static String lowercase(String value) {
+        return LowercaseProcessor.apply(value);
+    }
+
-@BindingAnnotation
-@Target({FIELD, PARAMETER})
-@Retention(RUNTIME)
-@Documented
-public @interface IndexDynamicSettings {
+    public static String uppercase(String value) {
+        return UppercaseProcessor.apply(value);
+    }
+
+    public static Object json(Object fieldValue) {
+        return JsonProcessor.apply(fieldValue);
+    }
+
+    public static void json(Map<String, Object> ctx, String field) {
+        JsonProcessor.apply(ctx, field);
+    }
+
+    public static String urlDecode(String value) {
+        return URLDecodeProcessor.apply(value);
+    }
 }

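The behaviour of these static helpers is pinned down by the YAML REST tests added later in this commit; a brief illustrative sketch in plain Java, with inputs and expected results taken from those tests:

    // Sketch only: values mirror the "Test invoke ... processor" REST tests below.
    long size      = Processors.bytes("1kb");             // 1024
    String lower   = Processors.lowercase("FooBar");      // "foobar"
    String upper   = Processors.uppercase("FooBar");      // "FOOBAR"
    String decoded = Processors.urlDecode("foo%20bar");   // "foo bar"

    // json(Object) parses a JSON string into the corresponding Java value (a Map here)
    Object parsed = Processors.json("{\"foo\":\"bar\"}");

    // json(Map, String) parses the named field and merges the resulting object into the
    // map itself -- the form a Painless script uses to add parsed JSON to the document root (ctx)
    Map<String, Object> ctx = new HashMap<>();
    ctx.put("source_field", "{\"foo\":\"bar\"}");
    Processors.json(ctx, "source_field");                 // ctx now also contains foo=bar
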
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.ingest.common;
|
||||
|
||||
import org.elasticsearch.painless.spi.PainlessExtension;
|
||||
import org.elasticsearch.painless.spi.Whitelist;
|
||||
import org.elasticsearch.painless.spi.WhitelistLoader;
|
||||
import org.elasticsearch.script.IngestScript;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class ProcessorsWhitelistExtension implements PainlessExtension {
|
||||
|
||||
private static final Whitelist WHITELIST =
|
||||
WhitelistLoader.loadFromResourceFiles(ProcessorsWhitelistExtension.class, "processors_whitelist.txt");
|
||||
|
||||
@Override
|
||||
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
|
||||
return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST));
|
||||
}
|
||||
}
|
|
@ -34,15 +34,19 @@ public final class URLDecodeProcessor extends AbstractStringProcessor {
|
|||
super(processorTag, field, ignoreMissing, targetField);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String process(String value) {
|
||||
public static String apply(String value) {
|
||||
try {
|
||||
return URLDecoder.decode(value, "UTF-8");
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new IllegalArgumentException("could not URL-decode field[" + getField() + "]", e);
|
||||
throw new IllegalArgumentException("Could not URL-decode value.", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String process(String value) {
|
||||
return apply(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return TYPE;
|
||||
|
|
|
@ -34,9 +34,13 @@ public final class UppercaseProcessor extends AbstractStringProcessor {
|
|||
super(processorTag, field, ignoreMissing, targetField);
|
||||
}
|
||||
|
||||
public static String apply(String value) {
|
||||
return value.toUpperCase(Locale.ROOT);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String process(String value) {
|
||||
return value.toUpperCase(Locale.ROOT);
|
||||
return apply(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -0,0 +1 @@
+org.elasticsearch.ingest.common.ProcessorsWhitelistExtension

@ -0,0 +1,29 @@
|
|||
#
|
||||
# Licensed to Elasticsearch under one or more contributor
|
||||
# license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright
|
||||
# ownership. Elasticsearch licenses this file to you under
|
||||
# the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
# This file contains a whitelist of static processor methods that can be accessed from painless
|
||||
|
||||
class org.elasticsearch.ingest.common.Processors {
|
||||
long bytes(String)
|
||||
String lowercase(String)
|
||||
String uppercase(String)
|
||||
Object json(Object)
|
||||
void json(Map, String)
|
||||
String urlDecode(String)
|
||||
}
|
|
@ -63,7 +63,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase {
|
|||
Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
|
||||
ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument));
|
||||
assertThat(exception.getMessage(),
|
||||
CoreMatchers.equalTo("failed to parse setting [" + fieldName + "] with value [8912pb] as a size in bytes"));
|
||||
CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes"));
|
||||
assertThat(exception.getCause().getMessage(),
|
||||
CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported"));
|
||||
}
|
||||
|
@ -93,6 +93,6 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase {
|
|||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L));
|
||||
assertWarnings("Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " +
|
||||
"[" + fieldName + "]");
|
||||
"[Ingest Field]");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -146,7 +146,6 @@ public class JsonProcessorTests extends ESTestCase {
|
|||
assertThat(exception.getMessage(), equalTo("field [field] not present as part of path [field]"));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testAddToRoot() throws Exception {
|
||||
String processorTag = randomAlphaOfLength(3);
|
||||
String randomTargetField = randomAlphaOfLength(2);
|
||||
|
|
|
@ -25,19 +25,25 @@ import org.elasticsearch.ingest.Processor;
|
|||
import org.elasticsearch.ingest.RandomDocumentPicks;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class KeyValueProcessorTests extends ESTestCase {
|
||||
|
||||
private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory();
|
||||
|
||||
public void test() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false);
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||
|
@ -46,7 +52,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
public void testRootTarget() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
||||
ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "myField", "&", "=", null, null,null, false);
|
||||
Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello"));
|
||||
assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||
|
@ -55,7 +61,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
public void testKeySameAsSourceField() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
||||
ingestDocument.setFieldValue("first", "first=hello");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "first", "&", "=", null, null,null, false);
|
||||
Processor processor = createKvProcessor("first", "&", "=", null, null,null, false);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello")));
|
||||
}
|
||||
|
@ -63,7 +69,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
public void testIncludeKeys() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=",
|
||||
Sets.newHashSet("first"), null, "target", false);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||
|
@ -73,7 +79,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
public void testExcludeKeys() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=",
|
||||
null, Sets.newHashSet("second"), "target", false);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||
|
@ -84,7 +90,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
|
||||
"first=hello&second=world&second=universe&third=bar");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=",
|
||||
Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false);
|
||||
processor.execute(ingestDocument);
|
||||
assertFalse(ingestDocument.hasField("target.first"));
|
||||
|
@ -92,9 +98,9 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
assertFalse(ingestDocument.hasField("target.third"));
|
||||
}
|
||||
|
||||
public void testMissingField() {
|
||||
public void testMissingField() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "&",
|
||||
Processor processor = createKvProcessor("unknown", "&",
|
||||
"=", null, null, "target", false);
|
||||
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
||||
assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
|
||||
|
@ -105,7 +111,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
|
||||
Collections.singletonMap(fieldName, null));
|
||||
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "", "", null, null, "target", true);
|
||||
Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true);
|
||||
processor.execute(ingestDocument);
|
||||
assertIngestDocument(originalIngestDocument, ingestDocument);
|
||||
}
|
||||
|
@ -113,7 +119,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
public void testNonExistentWithIgnoreMissing() throws Exception {
|
||||
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
|
||||
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "", "", null, null, "target", true);
|
||||
Processor processor = createKvProcessor("unknown", "", "", null, null, "target", true);
|
||||
processor.execute(ingestDocument);
|
||||
assertIngestDocument(originalIngestDocument, ingestDocument);
|
||||
}
|
||||
|
@ -121,7 +127,7 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
public void testFailFieldSplitMatch() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe");
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false);
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe"));
|
||||
assertFalse(ingestDocument.hasField("target.second"));
|
||||
|
@ -129,8 +135,94 @@ public class KeyValueProcessorTests extends ESTestCase {
|
|||
|
||||
public void testFailValueSplitMatch() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar"));
|
||||
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "foo", "&", "=", null, null, "target", false);
|
||||
Processor processor = createKvProcessor("foo", "&", "=", null, null, "target", false);
|
||||
Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
|
||||
assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]"));
|
||||
}
|
||||
|
||||
public void testTrimKeyAndValue() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first= hello &second=world& second =universe");
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, " ", " ", false, null);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||
}
|
||||
|
||||
public void testTrimMultiCharSequence() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
|
||||
"to=<foo@example.com>, orig_to=<bar@example.com>, %+relay=mail.example.com[private/dovecot-lmtp]," +
|
||||
" delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent "
|
||||
);
|
||||
Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.to", String.class), equalTo("foo@example.com"));
|
||||
assertThat(ingestDocument.getFieldValue("target.orig_to", String.class), equalTo("bar@example.com"));
|
||||
assertThat(ingestDocument.getFieldValue("target.relay", String.class), equalTo("mail.example.com[private/dovecot-lmtp]"));
|
||||
assertThat(ingestDocument.getFieldValue("target.delay", String.class), equalTo("2.2"));
|
||||
assertThat(ingestDocument.getFieldValue("target.delays", String.class), equalTo("1.9/0.01/0.01/0.21"));
|
||||
assertThat(ingestDocument.getFieldValue("target.dsn", String.class), equalTo("2.0.0"));
|
||||
assertThat(ingestDocument.getFieldValue("target.status", String.class), equalTo("sent"));
|
||||
}
|
||||
|
||||
public void testStripBrackets() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(
|
||||
random(), ingestDocument, "first=<hello>&second=\"world\"&second=(universe)&third=<foo>&fourth=[bar]&fifth='last'"
|
||||
);
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null);
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
|
||||
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||
assertThat(ingestDocument.getFieldValue("target.third", String.class), equalTo("foo"));
|
||||
assertThat(ingestDocument.getFieldValue("target.fourth", String.class), equalTo("bar"));
|
||||
assertThat(ingestDocument.getFieldValue("target.fifth", String.class), equalTo("last"));
|
||||
}
|
||||
|
||||
public void testAddPrefix() throws Exception {
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
|
||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
|
||||
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, false, "arg_");
|
||||
processor.execute(ingestDocument);
|
||||
assertThat(ingestDocument.getFieldValue("target.arg_first", String.class), equalTo("hello"));
|
||||
assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe")));
|
||||
}
|
||||
|
||||
private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
|
||||
Set<String> excludeKeys, String targetField,
|
||||
boolean ignoreMissing) throws Exception {
|
||||
return createKvProcessor(
|
||||
field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null
|
||||
);
|
||||
}
|
||||
|
||||
private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
|
||||
Set<String> excludeKeys, String targetField, boolean ignoreMissing,
|
||||
String trimKey, String trimValue, boolean stripBrackets,
|
||||
String prefix) throws Exception {
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("field", field);
|
||||
config.put("field_split", fieldSplit);
|
||||
config.put("value_split", valueSplit);
|
||||
config.put("target_field", targetField);
|
||||
if (includeKeys != null) {
|
||||
config.put("include_keys", new ArrayList<>(includeKeys));
|
||||
}
|
||||
if (excludeKeys != null) {
|
||||
config.put("exclude_keys", new ArrayList<>(excludeKeys));
|
||||
}
|
||||
config.put("ignore_missing", ignoreMissing);
|
||||
if (trimKey != null) {
|
||||
config.put("trim_key", trimKey);
|
||||
}
|
||||
if (trimValue != null) {
|
||||
config.put("trim_value", trimValue);
|
||||
}
|
||||
config.put("strip_brackets", stripBrackets);
|
||||
if (prefix != null) {
|
||||
config.put("prefix", prefix);
|
||||
}
|
||||
return FACTORY.create(null, randomAlphaOfLength(10), config);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,216 @@
|
|||
---
|
||||
teardown:
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "my_pipeline"
|
||||
ignore: 404
|
||||
|
||||
---
|
||||
"Test invoke bytes processor":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source" : "ctx.target_field = Processors.bytes(ctx.source_field)"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {source_field: "1kb"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.source_field: "1kb" }
|
||||
- match: { _source.target_field: 1024 }
|
||||
|
||||
---
|
||||
"Test invoke lowercase processor":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source" : "ctx.target_field = Processors.lowercase(ctx.source_field)"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {source_field: "FooBar"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.source_field: "FooBar" }
|
||||
- match: { _source.target_field: "foobar" }
|
||||
|
||||
---
|
||||
"Test invoke uppercase processor":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source" : "ctx.target_field = Processors.uppercase(ctx.source_field)"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {source_field: "FooBar"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.source_field: "FooBar" }
|
||||
- match: { _source.target_field: "FOOBAR" }
|
||||
|
||||
---
|
||||
"Test invoke json processor, assign to field":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source" : "ctx.target_field = Processors.json(ctx.source_field)"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {source_field: "{\"foo\":\"bar\"}"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.source_field: "{\"foo\":\"bar\"}" }
|
||||
- match: { _source.target_field.foo: "bar" }
|
||||
|
||||
---
|
||||
"Test invoke json processor, assign to root":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source" : "Processors.json(ctx, 'source_field')"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {source_field: "{\"foo\":\"bar\"}"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.source_field: "{\"foo\":\"bar\"}" }
|
||||
- match: { _source.foo: "bar" }
|
||||
|
||||
---
|
||||
"Test invoke urlDecode processor":
|
||||
- do:
|
||||
ingest.put_pipeline:
|
||||
id: "my_pipeline"
|
||||
body: >
|
||||
{
|
||||
"description": "_description",
|
||||
"processors": [
|
||||
{
|
||||
"script" : {
|
||||
"lang": "painless",
|
||||
"source" : "ctx.target_field = Processors.urlDecode(ctx.source_field)"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
- match: { acknowledged: true }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
pipeline: "my_pipeline"
|
||||
body: {source_field: "foo%20bar"}
|
||||
|
||||
- do:
|
||||
get:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
- match: { _source.source_field: "foo%20bar" }
|
||||
- match: { _source.target_field: "foo bar" }
|
|
@@ -174,4 +174,4 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Term {
class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc {
int getLength()
float getFreq()
}
}

@@ -26,7 +26,7 @@ import java.util.Map;

public class InitializerTests extends ScriptTestCase {

@SuppressWarnings({"unchecked", "rawtypes"})
@SuppressWarnings({"rawtypes"})
public void testArrayInitializers() {
int[] ints = (int[])exec("new int[] {}");

@@ -59,7 +59,7 @@ public class InitializerTests extends ScriptTestCase {
assertEquals("aaaaaa", objects[3]);
}

@SuppressWarnings({"unchecked", "rawtypes"})
@SuppressWarnings({"rawtypes"})
public void testListInitializers() {
List list = (List)exec("[]");

@@ -91,7 +91,7 @@ public class InitializerTests extends ScriptTestCase {
assertEquals("aaaaaa", list.get(3));
}

@SuppressWarnings({"unchecked", "rawtypes"})
@SuppressWarnings({"rawtypes"})
public void testMapInitializers() {
Map map = (Map)exec("[:]");

@@ -1,7 +1,5 @@
{
"index_patterns": [
"filebeat-6.0.0-*"
],
"index_patterns": ["filebeat-6.0.0-*"],
"mappings": {
"doc": {
"_meta": {
@@ -67,12 +65,14 @@
"type": "keyword"
},
"country_iso_code": {
"ignore_above": 1024,
"type": "keyword"
},
"location": {
"type": "geo_point"
},
"region_iso_code": {
"type": "keyword"
},
"region_name": {
"ignore_above": 1024,
"type": "keyword"

@@ -102,8 +102,8 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(queryId);
builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult);
builder.startArray(UNKNOWN_DOCS_FIELD.getPreferredName());
for (DocumentKey key : EvaluationMetric.filterUnknownDocuments(ratedHits)) {
builder.startArray(UNRATED_DOCS_FIELD.getPreferredName());
for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) {
builder.startObject();
builder.field(RatedDocument.INDEX_FIELD.getPreferredName(), key.getIndex());
builder.field(RatedDocument.DOC_ID_FIELD.getPreferredName(), key.getDocId());
@@ -123,7 +123,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
}

private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level");
private static final ParseField UNKNOWN_DOCS_FIELD = new ParseField("unknown_docs");
private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs");
private static final ParseField HITS_FIELD = new ParseField("hits");
private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details");
private static final ObjectParser<ParsedEvalQueryQuality, Void> PARSER = new ObjectParser<>("eval_query_quality",

@@ -76,10 +76,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
/**
* filter @link {@link RatedSearchHit} that don't have a rating
*/
static List<DocumentKey> filterUnknownDocuments(List<RatedSearchHit> ratedHits) {
List<DocumentKey> unknownDocs = ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)
static List<DocumentKey> filterUnratedDocuments(List<RatedSearchHit> ratedHits) {
return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)
.map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())).collect(Collectors.toList());
return unknownDocs;
}

/**

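Editorial aside (not part of the diff): the helper is renamed here from filterUnknownDocuments to filterUnratedDocuments, and the intermediate variable is dropped in favour of returning the stream result directly. A minimal caller-side sketch of the renamed API, assuming an EvalQueryQuality instance named "quality" taken from a rank-eval response (the variable name is illustrative only):

// Hypothetical usage: collect the hits of one evaluated query that carry no rating.
// filterUnratedDocuments(...) and getHitsAndRatings() are the methods touched in this commit.
List<DocumentKey> unrated = EvaluationMetric.filterUnratedDocuments(quality.getHitsAndRatings());
int unratedCount = unrated.size();
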
@@ -40,7 +40,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.CoreMatchers.containsString;
@@ -128,7 +128,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, rated);
assertEquals(12.779642067948913, result.getQualityLevel(), DELTA);
assertEquals(2, filterUnknownDocuments(result.getHitsAndRatings()).size());
assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size());

/**
* Check with normalization: to get the maximal possible dcg, sort documents by
@@ -185,7 +185,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
assertEquals(12.392789260714371, result.getQualityLevel(), DELTA);
assertEquals(1, filterUnknownDocuments(result.getHitsAndRatings()).size());
assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size());

/**
* Check with normalization: to get the maximal possible dcg, sort documents by
@@ -224,13 +224,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
assertEquals(0.0d, result.getQualityLevel(), DELTA);
assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size());
assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());

// also check normalized
dcg = new DiscountedCumulativeGain(true, null, 10);
result = dcg.evaluate("id", hits, ratedDocs);
assertEquals(0.0d, result.getQualityLevel(), DELTA);
assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size());
assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());
}

public void testParseFromXContent() throws IOException {

@@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.rankeval.RatedDocument.DocumentKey;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;

@@ -52,11 +51,6 @@ public class EvalQueryQualityTests extends ESTestCase {
}

public static EvalQueryQuality randomEvalQueryQuality() {
List<DocumentKey> unknownDocs = new ArrayList<>();
int numberOfUnknownDocs = randomInt(5);
for (int i = 0; i < numberOfUnknownDocs; i++) {
unknownDocs.add(new DocumentKey(randomAlphaOfLength(10), randomAlphaOfLength(10)));
}
int numberOfSearchHits = randomInt(5);
List<RatedSearchHit> ratedHits = new ArrayList<>();
for (int i = 0; i < numberOfSearchHits; i++) {

@@ -40,7 +40,7 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Set;

import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.instanceOf;

@@ -120,7 +120,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
for (Entry<String, EvalQueryQuality> entry : entrySet) {
EvalQueryQuality quality = entry.getValue();
if (entry.getKey() == "amsterdam_query") {
assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size());
assertEquals(2, filterUnratedDocuments(quality.getHitsAndRatings()).size());
List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
assertEquals(6, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {
@@ -133,7 +133,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
}
}
if (entry.getKey() == "berlin_query") {
assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size());
assertEquals(5, filterUnratedDocuments(quality.getHitsAndRatings()).size());
List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
assertEquals(6, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {

@@ -158,7 +158,7 @@ public class RankEvalResponseTests extends ESTestCase {
" \"details\": {" +
" \"coffee_query\": {" +
" \"quality_level\": 0.1," +
" \"unknown_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
" \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
" \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," +
" \"rating\":5}," +
" {\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"456\",\"_score\":1.0}," +

@@ -73,7 +73,7 @@ setup:

- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
- match: { details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}}

- length: { details.amsterdam_query.hits: 3}
@@ -85,7 +85,7 @@ setup:
- is_false: details.amsterdam_query.hits.2.rating

- match: { details.berlin_query.quality_level: 1.0}
- match: { details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}
- length: { details.berlin_query.hits: 2}
- match: { details.berlin_query.hits.0.hit._id: "doc1" }
@@ -155,9 +155,9 @@ setup:
- gt: {details.amsterdam_query.quality_level: 0.333}
- lt: {details.amsterdam_query.quality_level: 0.334}
- match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}}
- match: {details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc2"},
- match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"},
{"_index": "foo", "_id": "doc3"} ]}
- match: {details.berlin_query.quality_level: 0.5}
- match: {details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}}
- match: {details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc1"}]}
- match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]}

@@ -73,7 +73,7 @@
- lt: {quality_level: 13.848264 }
- gt: {details.dcg_query.quality_level: 13.848263}
- lt: {details.dcg_query.quality_level: 13.848264}
- match: {details.dcg_query.unknown_docs: [ ]}
- match: {details.dcg_query.unrated_docs: [ ]}

# reverse the order in which the results are returned (less relevant docs first)

@@ -100,7 +100,7 @@
- lt: {quality_level: 10.299675}
- gt: {details.dcg_query_reverse.quality_level: 10.299674}
- lt: {details.dcg_query_reverse.quality_level: 10.299675}
- match: {details.dcg_query_reverse.unknown_docs: [ ]}
- match: {details.dcg_query_reverse.unrated_docs: [ ]}

# if we mix both, we should get the average

@@ -138,7 +138,7 @@
- lt: {quality_level: 12.073970}
- gt: {details.dcg_query.quality_level: 13.848263}
- lt: {details.dcg_query.quality_level: 13.848264}
- match: {details.dcg_query.unknown_docs: [ ]}
- match: {details.dcg_query.unrated_docs: [ ]}
- gt: {details.dcg_query_reverse.quality_level: 10.299674}
- lt: {details.dcg_query_reverse.quality_level: 10.299675}
- match: {details.dcg_query_reverse.unknown_docs: [ ]}
- match: {details.dcg_query_reverse.unrated_docs: [ ]}

@@ -36,7 +36,7 @@

- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
- match: { details.amsterdam_query.unknown_docs: [ ]}
- match: { details.amsterdam_query.unrated_docs: [ ]}
- match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}

- is_true: failures.invalid_query

@@ -85,7 +85,7 @@ setup:
}

- match: {quality_level: 0.9}
- match: {details.amsterdam_query.unknown_docs.0._id: "6"}
- match: {details.amsterdam_query.unrated_docs.0._id: "6"}

---
"Test illegal request parts":

@@ -57,7 +57,6 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler<Upda
}

@Override
@SuppressWarnings("unchecked")
protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOException {
/*
* Passing the search request through UpdateByQueryRequest first allows

@@ -90,7 +90,6 @@ public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase
@Override
protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
final Netty4Transport t = (Netty4Transport) transport;
@SuppressWarnings("unchecked")
final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;
CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true);
}

@@ -185,6 +185,16 @@ public final class GeoIpProcessor extends AbstractProcessor {
geoData.put("continent_name", continentName);
}
break;
case REGION_ISO_CODE:
// ISO 3166-2 code for country subdivisions.
// See iso.org/iso-3166-country-codes.html
String countryIso = country.getIsoCode();
String subdivisionIso = subdivision.getIsoCode();
if (countryIso != null && subdivisionIso != null) {
String regionIsoCode = countryIso + "-" + subdivisionIso;
geoData.put("region_iso_code", regionIsoCode);
}
break;
case REGION_NAME:
String subdivisionName = subdivision.getName();
if (subdivisionName != null) {
@@ -300,8 +310,8 @@ public final class GeoIpProcessor extends AbstractProcessor {

public static final class Factory implements Processor.Factory {
static final Set<Property> DEFAULT_CITY_PROPERTIES = EnumSet.of(
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME,
Property.CITY_NAME, Property.LOCATION
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE,
Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
);
static final Set<Property> DEFAULT_COUNTRY_PROPERTIES = EnumSet.of(
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE
@@ -377,6 +387,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
COUNTRY_ISO_CODE,
COUNTRY_NAME,
CONTINENT_NAME,
REGION_ISO_CODE,
REGION_NAME,
CITY_NAME,
TIMEZONE,
@@ -386,7 +397,8 @@ public final class GeoIpProcessor extends AbstractProcessor {

static final EnumSet<Property> ALL_CITY_PROPERTIES = EnumSet.of(
Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_NAME,
Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION
Property.REGION_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE,
Property.LOCATION
);
static final EnumSet<Property> ALL_COUNTRY_PROPERTIES = EnumSet.of(
Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE

@@ -284,7 +284,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
config1.put("properties", Collections.singletonList("invalid"));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config1));
assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " +
"COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));
"COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));

Map<String, Object> config2 = new HashMap<>();
config2.put("field", "_field");

@@ -117,11 +117,12 @@ public class GeoIpProcessorTests extends ESTestCase {
assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address));
@SuppressWarnings("unchecked")
Map<String, Object> geoData = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("target_field");
assertThat(geoData.size(), equalTo(8));
assertThat(geoData.size(), equalTo(9));
assertThat(geoData.get("ip"), equalTo(address));
assertThat(geoData.get("country_iso_code"), equalTo("US"));
assertThat(geoData.get("country_name"), equalTo("United States"));
assertThat(geoData.get("continent_name"), equalTo("North America"));
assertThat(geoData.get("region_iso_code"), equalTo("US-FL"));
assertThat(geoData.get("region_name"), equalTo("Florida"));
assertThat(geoData.get("city_name"), equalTo("Hollywood"));
assertThat(geoData.get("timezone"), equalTo("America/New_York"));

@@ -30,11 +30,12 @@
type: test
id: 1
- match: { _source.field1: "128.101.101.101" }
- length: { _source.geoip: 5 }
- length: { _source.geoip: 6 }
- match: { _source.geoip.city_name: "Minneapolis" }
- match: { _source.geoip.country_iso_code: "US" }
- match: { _source.geoip.location.lon: -93.2166 }
- match: { _source.geoip.location.lat: 44.9759 }
- match: { _source.geoip.region_iso_code: "US-MN" }
- match: { _source.geoip.region_name: "Minnesota" }
- match: { _source.geoip.continent_name: "North America" }

@@ -54,7 +55,7 @@
{
"geoip" : {
"field" : "field1",
"properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"]
"properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_iso_code", "region_name", "continent_name"]
}
}
]
@@ -75,7 +76,7 @@
type: test
id: 1
- match: { _source.field1: "128.101.101.101" }
- length: { _source.geoip: 8 }
- length: { _source.geoip: 9 }
- match: { _source.geoip.city_name: "Minneapolis" }
- match: { _source.geoip.country_iso_code: "US" }
- match: { _source.geoip.ip: "128.101.101.101" }
@@ -83,6 +84,7 @@
- match: { _source.geoip.location.lat: 44.9759 }
- match: { _source.geoip.timezone: "America/Chicago" }
- match: { _source.geoip.country_name: "United States" }
- match: { _source.geoip.region_iso_code: "US-MN" }
- match: { _source.geoip.region_name: "Minnesota" }
- match: { _source.geoip.continent_name: "North America" }

@@ -188,11 +190,12 @@
type: test
id: 2
- match: { _source.field1: "128.101.101.101" }
- length: { _source.geoip: 5 }
- length: { _source.geoip: 6 }
- match: { _source.geoip.city_name: "Minneapolis" }
- match: { _source.geoip.country_iso_code: "US" }
- match: { _source.geoip.location.lon: -93.2166 }
- match: { _source.geoip.location.lat: 44.9759 }
- match: { _source.geoip.region_iso_code: "US-MN" }
- match: { _source.geoip.region_name: "Minnesota" }
- match: { _source.geoip.continent_name: "North America" }

@@ -32,6 +32,7 @@ import io.netty.handler.codec.http.HttpResponseDecoder;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
@@ -89,7 +90,6 @@ public class HttpReadWriteHandlerTests extends ESTestCase {
private final ResponseDecoder responseDecoder = new ResponseDecoder();

@Before
@SuppressWarnings("unchecked")
public void setMocks() {
transport = mock(NioHttpServerTransport.class);
Settings settings = Settings.EMPTY;

@@ -95,7 +95,6 @@ public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase {

@Override
protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
@SuppressWarnings("unchecked")
TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;
CloseableChannel.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true);
}

@@ -19,9 +19,6 @@

package org.elasticsearch.upgrades;

import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
@@ -34,7 +31,6 @@ import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.NotEqualMessageBuilder;
@@ -45,7 +41,6 @@ import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -142,8 +137,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);

count = randomIntBetween(2000, 3000);
byte[] randomByteArray = new byte[16];
@@ -164,16 +160,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
count = countOfIndexedRandomDocuments();
}

Map<String, String> params = new HashMap<>();
params.put("timeout", "2m");
params.put("wait_for_status", "green");
params.put("wait_for_no_relocating_shards", "true");
params.put("wait_for_events", "languid");
Map<String, Object> healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params));
logger.info("health api response: {}", healthRsp);
assertEquals("green", healthRsp.get("status"));
assertFalse((Boolean) healthRsp.get("timed_out"));

ensureGreenLongWait(index);
assertBasicSearchWorks(count);
assertAllSearchWorks(count);
assertBasicAggregationWorks();
@@ -205,8 +192,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);

int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(numDocs, true, false, i -> {
@@ -215,33 +203,26 @@ public class FullClusterRestartIT extends ESRestTestCase {
.endObject();
});
logger.info("Refreshing [{}]", index);
client().performRequest("POST", "/" + index + "/_refresh");
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
} else {
final int numReplicas = 1;
final long startTime = System.currentTimeMillis();
logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index);
String requestBody = "{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}";
Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings");
setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}");
Response response = client().performRequest(setNumberOfReplicas);

Map<String, String> params = new HashMap<>();
params.put("timeout", "2m");
params.put("wait_for_status", "green");
params.put("wait_for_no_relocating_shards", "true");
params.put("wait_for_events", "languid");
Map<String, Object> healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params));
assertEquals("green", healthRsp.get("status"));
assertFalse((Boolean) healthRsp.get("timed_out"));
ensureGreenLongWait(index);

logger.debug("--> index [{}] is green, took [{}] ms", index, (System.currentTimeMillis() - startTime));
Map<String, Object> recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery"));
Map<String, Object> recoverRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_recovery")));
logger.debug("--> recovery status:\n{}", recoverRsp);

Set<Integer> counts = new HashSet<>();
for (String node : dataNodes(index, client())) {
Map<String, Object> responseBody = toMap(client().performRequest("GET", "/" + index + "/_search",
Collections.singletonMap("preference", "_only_nodes:" + node)));
Request search = new Request("GET", "/" + index + "/_search");
search.addParameter("preference", "_only_nodes:" + node);
Map<String, Object> responseBody = entityAsMap(client().performRequest(search));
assertNoFailures(responseBody);
int hits = (int) XContentMapValues.extractValue("hits.total", responseBody);
counts.add(hits);
@@ -282,12 +263,13 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);

String aliasName = "%23" + index; // %23 == #
client().performRequest("PUT", "/" + index + "/_alias/" + aliasName);
Response response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName);
client().performRequest(new Request("PUT", "/" + index + "/_alias/" + aliasName));
Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName));
assertEquals(200, response.getStatusLine().getStatusCode());

count = randomIntBetween(32, 128);
@@ -301,19 +283,20 @@ public class FullClusterRestartIT extends ESRestTestCase {
count = countOfIndexedRandomDocuments();
}

logger.error("clusterState=" + toMap(client().performRequest("GET", "/_cluster/state",
Collections.singletonMap("metric", "metadata"))));
Request request = new Request("GET", "/_cluster/state");
request.addParameter("metric", "metadata");
logger.error("clusterState=" + entityAsMap(client().performRequest(request)));
// We can read from the alias just like we can read from the index.
String aliasName = "%23" + index; // %23 == #
Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + aliasName + "/_search"));
Map<String, Object> searchRsp = entityAsMap(client().performRequest(new Request("GET", "/" + aliasName + "/_search")));
int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(count, totalHits);
if (runningAgainstOldCluster == false) {
// We can remove the alias.
Response response = client().performRequest("DELETE", "/" + index + "/_alias/" + aliasName);
Response response = client().performRequest(new Request("DELETE", "/" + index + "/_alias/" + aliasName));
assertEquals(200, response.getStatusLine().getStatusCode());
// and check that it is gone:
response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName);
response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName));
assertEquals(404, response.getStatusLine().getStatusCode());
}
}
@@ -330,13 +313,14 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
client().performRequest("PUT", "/" + index);
Request createTemplate = new Request("PUT", "/_template/template_1");
createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createTemplate);
client().performRequest(new Request("PUT", "/" + index));
}

// verifying if we can still read some properties from cluster state api:
Map<String, Object> clusterState = toMap(client().performRequest("GET", "/_cluster/state"));
Map<String, Object> clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state")));

// Check some global properties:
String clusterName = (String) clusterState.get("cluster_name");
@@ -381,8 +365,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);

numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> {
@@ -393,23 +378,20 @@ public class FullClusterRestartIT extends ESRestTestCase {

ensureGreen(index); // wait for source index to be available on both nodes before starting shrink

String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}";
Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
client().performRequest(updateSettingsRequest);

String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}";
rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(),
new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
client().performRequest(shrinkIndexRequest);

rsp = client().performRequest("POST", "/_refresh");
assertEquals(200, rsp.getStatusLine().getStatusCode());
client().performRequest(new Request("POST", "/_refresh"));
} else {
numDocs = countOfIndexedRandomDocuments();
}

Map<?, ?> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
int totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertThat(totalShards, greaterThan(1));
@@ -418,7 +400,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
int totalHits = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(numDocs, totalHits);

response = toMap(client().performRequest("GET", "/" + shrunkenIndex+ "/_search"));
response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex+ "/_search")));
assertNoFailures(response);
totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertEquals(1, totalShards);
@@ -448,8 +430,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);

numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> {
@@ -460,23 +443,20 @@ public class FullClusterRestartIT extends ESRestTestCase {
} else {
ensureGreen(index); // wait for source index to be available on both nodes before starting shrink

String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}";
Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
client().performRequest(updateSettingsRequest);

String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}";
rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(),
new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
client().performRequest(shrinkIndexRequest);

numDocs = countOfIndexedRandomDocuments();
}

Response rsp = client().performRequest("POST", "/_refresh");
assertEquals(200, rsp.getStatusLine().getStatusCode());
client().performRequest(new Request("POST", "/_refresh"));

Map<?, ?> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
int totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertThat(totalShards, greaterThan(1));
@@ -486,7 +466,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
assertEquals(numDocs, totalHits);

if (runningAgainstOldCluster == false) {
response = toMap(client().performRequest("GET", "/" + shrunkenIndex + "/_search"));
response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex + "/_search")));
assertNoFailures(response);
totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertEquals(1, totalShards);
@@ -499,43 +479,48 @@ public class FullClusterRestartIT extends ESRestTestCase {

void assertBasicSearchWorks(int count) throws IOException {
logger.info("--> testing basic search");
Map<String, Object> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
assertNoFailures(response);
int numDocs = (int) XContentMapValues.extractValue("hits.total", response);
logger.info("Found {} in old index", numDocs);
assertEquals(count, numDocs);
{
Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
int numDocs = (int) XContentMapValues.extractValue("hits.total", response);
logger.info("Found {} in old index", numDocs);
assertEquals(count, numDocs);
}

logger.info("--> testing basic search with sort");
String searchRequestBody = "{ \"sort\": [{ \"int\" : \"asc\" }]}";
response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(response);
numDocs = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(count, numDocs);
{
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"sort\": [{ \"int\" : \"asc\" }]}");
Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(response);
assertTotalHits(count, response);
}

logger.info("--> testing exists filter");
searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}";
response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(response);
numDocs = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(count, numDocs);
{
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"string\"} }}");
Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(response);
assertTotalHits(count, response);
}

searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}";
response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(response);
numDocs = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(count, numDocs);
logger.info("--> testing field with dots in the name");
{
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}");
Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(response);
assertTotalHits(count, response);
}
}

void assertAllSearchWorks(int count) throws IOException {
logger.info("--> testing _all search");
Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search"));
assertNoFailures(searchRsp);
int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(count, totalHits);
Map<?, ?> bestHit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0);
Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
assertTotalHits(count, response);
Map<?, ?> bestHit = (Map<?, ?>) ((List<?>) (XContentMapValues.extractValue("hits.hits", response))).get(0);

// Make sure there are payloads and they are taken into account for the score
// the 'string' field has a boost of 4 in the mappings so it should get a payload boost
@@ -543,82 +528,77 @@ public class FullClusterRestartIT extends ESRestTestCase {
assertNotNull(stringValue);
String type = (String) bestHit.get("_type");
String id = (String) bestHit.get("_id");
String requestBody = "{ \"query\": { \"match_all\" : {} }}";
String explanation = toStr(client().performRequest("GET", "/" + index + "/" + type + "/" + id,
Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
Request explanationRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
explanationRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
String explanation = toStr(client().performRequest(explanationRequest));
assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost"));

// Make sure the query can run on the whole index
searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search",
Collections.singletonMap("explain", "true"), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(searchRsp);
totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(count, totalHits);
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setEntity(explanationRequest.getEntity());
searchRequest.addParameter("explain", "true");
Map<?, ?> matchAllResponse = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(matchAllResponse);
assertTotalHits(count, matchAllResponse);
}

void assertBasicAggregationWorks() throws IOException {
// histogram on a long
String requestBody = "{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}";
Map<?, ?> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(searchRsp);
List<?> histoBuckets = (List<?>) XContentMapValues.extractValue("aggregations.histo.buckets", searchRsp);
long totalCount = 0;
Request longHistogramRequest = new Request("GET", "/" + index + "/_search");
longHistogramRequest.setJsonEntity("{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}");
Map<?, ?> longHistogram = entityAsMap(client().performRequest(longHistogramRequest));
assertNoFailures(longHistogram);
List<?> histoBuckets = (List<?>) XContentMapValues.extractValue("aggregations.histo.buckets", longHistogram);
int histoCount = 0;
for (Object entry : histoBuckets) {
Map<?, ?> bucket = (Map<?, ?>) entry;
totalCount += (Integer) bucket.get("doc_count");
histoCount += (Integer) bucket.get("doc_count");
}
int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(totalHits, totalCount);
assertTotalHits(histoCount, longHistogram);

// terms on a boolean
requestBody = "{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}";
searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
List<?> termsBuckets = (List<?>) XContentMapValues.extractValue("aggregations.bool_terms.buckets", searchRsp);
totalCount = 0;
Request boolTermsRequest = new Request("GET", "/" + index + "/_search");
boolTermsRequest.setJsonEntity("{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}");
Map<?, ?> boolTerms = entityAsMap(client().performRequest(boolTermsRequest));
List<?> termsBuckets = (List<?>) XContentMapValues.extractValue("aggregations.bool_terms.buckets", boolTerms);
int termsCount = 0;
for (Object entry : termsBuckets) {
Map<?, ?> bucket = (Map<?, ?>) entry;
totalCount += (Integer) bucket.get("doc_count");
termsCount += (Integer) bucket.get("doc_count");
}
totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(totalHits, totalCount);
assertTotalHits(termsCount, boolTerms);
}

void assertRealtimeGetWorks() throws IOException {
String requestBody = "{ \"index\": { \"refresh_interval\" : -1 }}";
Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}");
client().performRequest(disableAutoRefresh);

requestBody = "{ \"query\": { \"match_all\" : {} }}";
Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0);
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
Map<?, ?> searchResponse = entityAsMap(client().performRequest(searchRequest));
Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0);
String docId = (String) hit.get("_id");

requestBody = "{ \"doc\" : { \"foo\": \"bar\"}}";
response = client().performRequest("POST", "/" + index + "/doc/" + docId + "/_update", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update");
updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}");
client().performRequest(updateRequest);

Map<String, Object> getRsp = toMap(client().performRequest("GET", "/" + index + "/doc/" + docId));
Map<String, Object> getRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/doc/" + docId)));
Map<?, ?> source = (Map<?, ?>) getRsp.get("_source");
assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo"));

requestBody = "{ \"index\": { \"refresh_interval\" : \"1s\" }}";
response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request enableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
enableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : \"1s\" }}");
client().performRequest(enableAutoRefresh);
}

void assertStoredBinaryFields(int count) throws Exception {
String requestBody = "{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}";
Map<String, Object> rsp = toMap(client().performRequest("GET", "/" + index + "/_search",
Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
Request request = new Request("GET", "/" + index + "/_search");
request.setJsonEntity("{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}");
Map<String, Object> rsp = entityAsMap(client().performRequest(request));

int totalCount = (Integer) XContentMapValues.extractValue("hits.total", rsp);
assertEquals(count, totalCount);
assertTotalHits(count, rsp);
List<?> hits = (List<?>) XContentMapValues.extractValue("hits.hits", rsp);
assertEquals(100, hits.size());
for (Object hit : hits) {
@@ -631,14 +611,6 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
}

static Map<String, Object> toMap(Response response) throws IOException {
return toMap(EntityUtils.toString(response.getEntity()));
}

static Map<String, Object> toMap(String response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
}

static String toStr(Response response) throws IOException {
return EntityUtils.toString(response.getEntity());
}
@@ -648,6 +620,11 @@ public class FullClusterRestartIT extends ESRestTestCase {
assertEquals(0, failed);
}

static void assertTotalHits(int expectedTotalHits, Map<?, ?> response) {
int actualTotalHits = (Integer) XContentMapValues.extractValue("hits.total", response);
assertEquals(expectedTotalHits, actualTotalHits);
}

/**
* Tests that a single document survives. Super basic smoke test.
*/
@@ -656,11 +633,12 @@ public class FullClusterRestartIT extends ESRestTestCase {
String doc = "{\"test\": \"test\"}";

if (runningAgainstOldCluster) {
client().performRequest("PUT", docLocation, singletonMap("refresh", "true"),
new StringEntity(doc, ContentType.APPLICATION_JSON));
Request createDoc = new Request("PUT", docLocation);
createDoc.setJsonEntity(doc);
client().performRequest(createDoc);
}

assertThat(toStr(client().performRequest("GET", docLocation)), containsString(doc));
assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc));
}

/**
@@ -733,16 +711,18 @@ public class FullClusterRestartIT extends ESRestTestCase {
}

// Count the documents in the index to make sure we have as many as we put there
String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0")));
Request countRequest = new Request("GET", "/" + index + "/_search");
countRequest.addParameter("size", "0");
String countResponse = toStr(client().performRequest(countRequest));
assertThat(countResponse, containsString("\"total\":" + count));

if (false == runningAgainstOldCluster) {
boolean restoredFromTranslog = false;
boolean foundPrimary = false;
Map<String, String> params = new HashMap<>();
params.put("h", "index,shard,type,stage,translog_ops_recovered");
params.put("s", "index,shard,type");
String recoveryResponse = toStr(client().performRequest("GET", "/_cat/recovery/" + index, params));
Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index);
recoveryRequest.addParameter("h", "index,shard,type,stage,translog_ops_recovered");
recoveryRequest.addParameter("s", "index,shard,type");
String recoveryResponse = toStr(client().performRequest(recoveryRequest));
for (String line : recoveryResponse.split("\n")) {
// Find the primaries
foundPrimary = true;
@@ -768,11 +748,10 @@ public class FullClusterRestartIT extends ESRestTestCase {
if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) {
int numCurrentVersion = 0;
int numBwcVersion = 0;
params.clear();
params.put("h", "prirep,shard,index,version");
params.put("s", "prirep,shard,index");
String segmentsResponse = toStr(
client().performRequest("GET", "/_cat/segments/" + index, params));
Request segmentsRequest = new Request("GET", "/_cat/segments/" + index);
segmentsRequest.addParameter("h", "prirep,shard,index,version");
segmentsRequest.addParameter("s", "prirep,shard,index");
String segmentsResponse = toStr(client().performRequest(segmentsRequest));
for (String line : segmentsResponse.split("\n")) {
if (false == line.startsWith("p")) {
continue;
@@ -817,14 +796,16 @@ public class FullClusterRestartIT extends ESRestTestCase {
refresh();

// Count the documents in the index to make sure we have as many as we put there
String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0")));
Request countRequest = new Request("GET", "/" + index + "/_search");
countRequest.addParameter("size", "0");
String countResponse = toStr(client().performRequest(countRequest));
assertThat(countResponse, containsString("\"total\":" + count));

// Stick a routing attribute into to cluster settings so we can see it after the restore
HttpEntity routingSetting = new StringEntity(
"{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}",
ContentType.APPLICATION_JSON);
client().performRequest("PUT", "/_cluster/settings", emptyMap(), routingSetting);
Request addRoutingSettings = new Request("PUT", "/_cluster/settings");
addRoutingSettings.setJsonEntity(
"{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}");
client().performRequest(addRoutingSettings);

// Stick a template into the cluster so we can see it after the restore
XContentBuilder templateBuilder = JsonXContent.contentBuilder().startObject();
@@ -857,8 +838,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
templateBuilder.endObject();
}
templateBuilder.endObject().endObject();
client().performRequest("PUT", "/_template/test_template", emptyMap(),
new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON));
Request createTemplateRequest = new Request("PUT", "/_template/test_template");
createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder));
client().performRequest(createTemplateRequest);

if (runningAgainstOldCluster) {
// Create the repo
@@ -871,13 +853,15 @@ public class FullClusterRestartIT extends ESRestTestCase {
repoConfig.endObject();
}
repoConfig.endObject();
client().performRequest("PUT", "/_snapshot/repo", emptyMap(),
new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON));
Request createRepoRequest = new Request("PUT", "/_snapshot/repo");
createRepoRequest.setJsonEntity(Strings.toString(repoConfig));
client().performRequest(createRepoRequest);
}

client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"),
singletonMap("wait_for_completion", "true"),
new StringEntity("{\"indices\": \"" + index + "\"}", ContentType.APPLICATION_JSON));
Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"));
createSnapshot.addParameter("wait_for_completion", "true");
createSnapshot.setJsonEntity("{\"indices\": \"" + index + "\"}");
client().performRequest(createSnapshot);

checkSnapshot("old_snap", count, oldClusterVersion);
if (false == runningAgainstOldCluster) {
@@ -896,10 +880,13 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
} else {
Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
Request statsRequest = new Request("GET", index + "/_stats");
statsRequest.addParameter("level", "shards");
Response response = client().performRequest(statsRequest);
List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
String globalHistoryUUID = null;
for (Object shard : shardStats) {
@@ -920,18 +907,20 @@ public class FullClusterRestartIT extends ESRestTestCase {

private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException {
// Check the snapshot metadata, especially the version
String response = toStr(client().performRequest("GET", "/_snapshot/repo/" + snapshotName, listSnapshotVerboseParams()));
Map<String, Object> map = toMap(response);
assertEquals(response, singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", map));
assertEquals(response, singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", map));
assertEquals(response, singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", map));
Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName);
if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) {
listSnapshotRequest.addParameter("verbose", "true");
}
Map<String, Object> listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest));
assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse));
|
||||
assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse));
|
||||
assertEquals(singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", listSnapshotResponse));
|
||||
|
||||
// Remove the routing setting and template so we can test restoring them.
|
||||
HttpEntity clearRoutingSetting = new StringEntity(
|
||||
"{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}",
|
||||
ContentType.APPLICATION_JSON);
|
||||
client().performRequest("PUT", "/_cluster/settings", emptyMap(), clearRoutingSetting);
|
||||
client().performRequest("DELETE", "/_template/test_template", emptyMap(), clearRoutingSetting);
|
||||
Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings");
|
||||
clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}");
|
||||
client().performRequest(clearRoutingFromSettings);
|
||||
client().performRequest(new Request("DELETE", "/_template/test_template"));
|
||||
|
||||
// Restore
|
||||
XContentBuilder restoreCommand = JsonXContent.contentBuilder().startObject();
|
||||
|
@ -940,11 +929,15 @@ public class FullClusterRestartIT extends ESRestTestCase {
|
|||
restoreCommand.field("rename_pattern", index);
|
||||
restoreCommand.field("rename_replacement", "restored_" + index);
|
||||
restoreCommand.endObject();
|
||||
client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"),
|
||||
new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON));
|
||||
Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore");
|
||||
restoreRequest.addParameter("wait_for_completion", "true");
|
||||
restoreRequest.setJsonEntity(Strings.toString(restoreCommand));
|
||||
client().performRequest(restoreRequest);
|
||||
|
||||
// Make sure search finds all documents
|
||||
String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
|
||||
Request countRequest = new Request("GET", "/restored_" + index + "/_search");
|
||||
countRequest.addParameter("size", "0");
|
||||
String countResponse = toStr(client().performRequest(countRequest));
|
||||
assertThat(countResponse, containsString("\"total\":" + count));
|
||||
|
||||
// Add some extra documents to the index to be sure we can still write to it after restoring it
|
||||
|
@ -954,61 +947,56 @@ public class FullClusterRestartIT extends ESRestTestCase {
|
|||
bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n");
|
||||
bulk.append("{\"test\":\"test\"}\n");
|
||||
}
|
||||
client().performRequest("POST", "/restored_" + index + "/doc/_bulk", singletonMap("refresh", "true"),
|
||||
new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
|
||||
Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk");
|
||||
writeToRestoredRequest.addParameter("refresh", "true");
|
||||
writeToRestoredRequest.setJsonEntity(bulk.toString());
|
||||
client().performRequest(writeToRestoredRequest);
|
||||
|
||||
// And count to make sure the add worked
|
||||
// Make sure search finds all documents
|
||||
countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
|
||||
assertThat(countResponse, containsString("\"total\":" + (count + extras)));
|
||||
Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search");
|
||||
countAfterWriteRequest.addParameter("size", "0");
|
||||
String countAfterWriteResponse = toStr(client().performRequest(countAfterWriteRequest));
|
||||
assertThat(countAfterWriteResponse, containsString("\"total\":" + (count + extras)));
|
||||
|
||||
// Clean up the index for the next iteration
|
||||
client().performRequest("DELETE", "/restored_*");
|
||||
client().performRequest(new Request("DELETE", "/restored_*"));
|
||||
|
||||
// Check settings added by the restore process
|
||||
map = toMap(client().performRequest("GET", "/_cluster/settings", singletonMap("flat_settings", "true")));
|
||||
Map<String, Object> expected = new HashMap<>();
|
||||
expected.put("transient", emptyMap());
|
||||
expected.put("persistent", singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString()));
|
||||
if (expected.equals(map) == false) {
|
||||
Request clusterSettingsRequest = new Request("GET", "/_cluster/settings");
|
||||
clusterSettingsRequest.addParameter("flat_settings", "true");
|
||||
Map<String, Object> clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest));
|
||||
Map<String, Object> expectedClusterSettings = new HashMap<>();
|
||||
expectedClusterSettings.put("transient", emptyMap());
|
||||
expectedClusterSettings.put("persistent",
|
||||
singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString()));
|
||||
if (expectedClusterSettings.equals(clusterSettingsResponse) == false) {
|
||||
NotEqualMessageBuilder builder = new NotEqualMessageBuilder();
|
||||
builder.compareMaps(map, expected);
|
||||
builder.compareMaps(clusterSettingsResponse, expectedClusterSettings);
|
||||
fail("settings don't match:\n" + builder.toString());
|
||||
}
|
||||
|
||||
// Check that the template was restored successfully
|
||||
map = toMap(client().performRequest("GET", "/_template/test_template"));
|
||||
expected = new HashMap<>();
|
||||
Map<String, Object> getTemplateResponse = entityAsMap(client().performRequest(new Request("GET", "/_template/test_template")));
|
||||
Map<String, Object> expectedTemplate = new HashMap<>();
|
||||
if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_0_0_beta1)) {
|
||||
expected.put("template", "evil_*");
|
||||
expectedTemplate.put("template", "evil_*");
|
||||
} else {
|
||||
expected.put("index_patterns", singletonList("evil_*"));
|
||||
expectedTemplate.put("index_patterns", singletonList("evil_*"));
|
||||
}
|
||||
expected.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
|
||||
expected.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
|
||||
expected.put("order", 0);
|
||||
expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
|
||||
expectedTemplate.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
|
||||
expectedTemplate.put("order", 0);
|
||||
Map<String, Object> aliases = new HashMap<>();
|
||||
aliases.put("alias1", emptyMap());
|
||||
aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString()))));
|
||||
expected.put("aliases", aliases);
|
||||
expected = singletonMap("test_template", expected);
|
||||
if (false == expected.equals(map)) {
|
||||
expectedTemplate.put("aliases", aliases);
|
||||
expectedTemplate = singletonMap("test_template", expectedTemplate);
|
||||
if (false == expectedTemplate.equals(getTemplateResponse)) {
|
||||
NotEqualMessageBuilder builder = new NotEqualMessageBuilder();
|
||||
builder.compareMaps(map, expected);
|
||||
builder.compareMaps(getTemplateResponse, expectedTemplate);
|
||||
fail("template doesn't match:\n" + builder.toString());
|
||||
}
|
||||
|
||||
}
|

/**
* Parameters required to get the version of Elasticsearch that took the snapshot.
* On versions after 5.5 we need a {@code verbose} parameter.
*/
private Map<String, String> listSnapshotVerboseParams() {
if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0)) {
return emptyMap();
}
return singletonMap("verbose", "true");
}

// TODO tests for upgrades after shrink. We've had trouble with shrink in the past.

@ -1018,14 +1006,15 @@ public class FullClusterRestartIT extends ESRestTestCase {
|
|||
logger.info("Indexing {} random documents", count);
|
||||
for (int i = 0; i < count; i++) {
|
||||
logger.debug("Indexing document [{}]", i);
|
||||
client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
|
||||
new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON));
|
||||
Request createDocument = new Request("POST", "/" + index + "/doc/" + i);
|
||||
createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
|
||||
client().performRequest(createDocument);
|
||||
if (rarely()) {
|
||||
refresh();
|
||||
}
|
||||
if (flushAllowed && rarely()) {
|
||||
logger.debug("Flushing [{}]", index);
|
||||
client().performRequest("POST", "/" + index + "/_flush");
|
||||
client().performRequest(new Request("POST", "/" + index + "/_flush"));
|
||||
}
|
||||
}
|
||||
if (saveInfo) {
|
||||
|
@ -1042,13 +1031,16 @@ public class FullClusterRestartIT extends ESRestTestCase {
|
|||
infoDoc.field("value", value);
|
||||
infoDoc.endObject();
|
||||
// Only create the first version so we know how many documents are created when the index is first created
|
||||
Map<String, String> params = singletonMap("op_type", "create");
|
||||
client().performRequest("PUT", "/info/doc/" + index + "_" + type, params,
|
||||
new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON));
|
||||
Request request = new Request("PUT", "/info/doc/" + index + "_" + type);
|
||||
request.addParameter("op_type", "create");
|
||||
request.setJsonEntity(Strings.toString(infoDoc));
|
||||
client().performRequest(request);
|
||||
}
|
||||
|
||||
private String loadInfoDocument(String type) throws IOException {
|
||||
String doc = toStr(client().performRequest("GET", "/info/doc/" + index + "_" + type, singletonMap("filter_path", "_source")));
|
||||
Request request = new Request("GET", "/info/doc/" + index + "_" + type);
|
||||
request.addParameter("filter_path", "_source");
|
||||
String doc = toStr(client().performRequest(request));
|
||||
Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc);
|
||||
assertTrue(doc, m.find());
|
||||
return m.group(1);
|
||||
|
@ -1060,11 +1052,13 @@ public class FullClusterRestartIT extends ESRestTestCase {
|
|||
|
||||
private void refresh() throws IOException {
|
||||
logger.debug("Refreshing [{}]", index);
|
||||
client().performRequest("POST", "/" + index + "/_refresh");
|
||||
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
|
||||
}
|
||||
|
||||
private List<String> dataNodes(String index, RestClient client) throws IOException {
|
||||
Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
|
||||
Request request = new Request("GET", index + "/_stats");
|
||||
request.addParameter("level", "shards");
|
||||
Response response = client.performRequest(request);
|
||||
List<String> nodes = new ArrayList<>();
|
||||
List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
|
||||
for (Object shard : shardStats) {
|
||||
|
@@ -1073,4 +1067,21 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
return nodes;
}

/**
* Wait for an index to have green health, waiting longer than
* {@link ESRestTestCase#ensureGreen}.
*/
protected void ensureGreenLongWait(String index) throws IOException {
Request request = new Request("GET", "/_cluster/health/" + index);
request.addParameter("timeout", "2m");
request.addParameter("wait_for_status", "green");
request.addParameter("wait_for_no_relocating_shards", "true");
request.addParameter("wait_for_events", "languid");
request.addParameter("level", "shards");
Map<String, Object> healthRsp = entityAsMap(client().performRequest(request));
logger.info("health api response: {}", healthRsp);
assertEquals("green", healthRsp.get("status"));
assertFalse((Boolean) healthRsp.get("timed_out"));
}
}
|
||||
|
|
|
@ -18,8 +18,6 @@
|
|||
*/
|
||||
package org.elasticsearch.upgrades;
|
||||
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.client.Request;
|
||||
|
@ -32,14 +30,12 @@ import org.elasticsearch.test.rest.yaml.ObjectPath;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
|
||||
import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING;
|
||||
import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
|
||||
|
@ -51,6 +47,8 @@ import static org.hamcrest.Matchers.notNullValue;
|
|||
* In depth testing of the recovery mechanism during a rolling restart.
|
||||
*/
|
||||
public class RecoveryIT extends AbstractRollingTestCase {
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31291")
|
||||
public void testHistoryUUIDIsGenerated() throws Exception {
|
||||
final String index = "index_history_uuid";
|
||||
if (CLUSTER_TYPE == ClusterType.OLD) {
|
||||
|
@ -65,8 +63,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
createIndex(index, settings.build());
|
||||
} else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
|
||||
ensureGreen(index);
|
||||
Response response = client().performRequest("GET", index + "/_stats", Collections.singletonMap("level", "shards"));
|
||||
assertOK(response);
|
||||
Request shardStatsRequest = new Request("GET", index + "/_stats");
|
||||
shardStatsRequest.addParameter("level", "shards");
|
||||
Response response = client().performRequest(shardStatsRequest);
|
||||
ObjectPath objectPath = ObjectPath.createFromResponse(response);
|
||||
List<Object> shardStats = objectPath.evaluate("indices." + index + ".shards.0");
|
||||
assertThat(shardStats, hasSize(2));
|
||||
|
@ -87,8 +86,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
private int indexDocs(String index, final int idStart, final int numDocs) throws IOException {
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
final int id = idStart + i;
|
||||
assertOK(client().performRequest("PUT", index + "/test/" + id, emptyMap(),
|
||||
new StringEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}", ContentType.APPLICATION_JSON)));
|
||||
Request indexDoc = new Request("PUT", index + "/test/" + id);
|
||||
indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}");
|
||||
client().performRequest(indexDoc);
|
||||
}
|
||||
return numDocs;
|
||||
}
|
||||
|
@ -113,7 +113,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
|
||||
public void testRecoveryWithConcurrentIndexing() throws Exception {
|
||||
final String index = "recovery_with_concurrent_indexing";
|
||||
Response response = client().performRequest("GET", "_nodes");
|
||||
Response response = client().performRequest(new Request("GET", "_nodes"));
|
||||
ObjectPath objectPath = ObjectPath.createFromResponse(response);
|
||||
final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
|
||||
List<String> nodes = new ArrayList<>(nodeMap.keySet());
|
||||
|
@ -139,7 +139,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
|
||||
asyncIndexDocs(index, 10, 50).get();
|
||||
ensureGreen(index);
|
||||
assertOK(client().performRequest("POST", index + "/_refresh"));
|
||||
client().performRequest(new Request("POST", index + "/_refresh"));
|
||||
assertCount(index, "_only_nodes:" + nodes.get(0), 60);
|
||||
assertCount(index, "_only_nodes:" + nodes.get(1), 60);
|
||||
assertCount(index, "_only_nodes:" + nodes.get(2), 60);
|
||||
|
@ -150,7 +150,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
|
||||
asyncIndexDocs(index, 60, 50).get();
|
||||
ensureGreen(index);
|
||||
assertOK(client().performRequest("POST", index + "/_refresh"));
|
||||
client().performRequest(new Request("POST", index + "/_refresh"));
|
||||
assertCount(index, "_only_nodes:" + nodes.get(0), 110);
|
||||
assertCount(index, "_only_nodes:" + nodes.get(1), 110);
|
||||
assertCount(index, "_only_nodes:" + nodes.get(2), 110);
|
||||
|
@ -161,15 +161,16 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
}
|
||||
|
||||
private void assertCount(final String index, final String preference, final int expectedCount) throws IOException {
|
||||
final Response response = client().performRequest("GET", index + "/_count", Collections.singletonMap("preference", preference));
|
||||
assertOK(response);
|
||||
final Request request = new Request("GET", index + "/_count");
|
||||
request.addParameter("preference", preference);
|
||||
final Response response = client().performRequest(request);
|
||||
final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString());
|
||||
assertThat(actualCount, equalTo(expectedCount));
|
||||
}
|
||||
|
||||
|
||||
private String getNodeId(Predicate<Version> versionPredicate) throws IOException {
|
||||
Response response = client().performRequest("GET", "_nodes");
|
||||
Response response = client().performRequest(new Request("GET", "_nodes"));
|
||||
ObjectPath objectPath = ObjectPath.createFromResponse(response);
|
||||
Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
|
||||
for (String id : nodesAsMap.keySet()) {
|
||||
|
@ -216,7 +217,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
updateIndexSettings(index, Settings.builder().put("index.routing.allocation.include._id", newNode));
|
||||
asyncIndexDocs(index, 10, 50).get();
|
||||
ensureGreen(index);
|
||||
assertOK(client().performRequest("POST", index + "/_refresh"));
|
||||
client().performRequest(new Request("POST", index + "/_refresh"));
|
||||
assertCount(index, "_only_nodes:" + newNode, 60);
|
||||
break;
|
||||
case UPGRADED:
|
||||
|
@ -226,8 +227,8 @@ public class RecoveryIT extends AbstractRollingTestCase {
|
|||
);
|
||||
asyncIndexDocs(index, 60, 50).get();
|
||||
ensureGreen(index);
|
||||
assertOK(client().performRequest("POST", index + "/_refresh"));
|
||||
Response response = client().performRequest("GET", "_nodes");
|
||||
client().performRequest(new Request("POST", index + "/_refresh"));
|
||||
Response response = client().performRequest(new Request("GET", "_nodes"));
|
||||
ObjectPath objectPath = ObjectPath.createFromResponse(response);
|
||||
final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
|
||||
List<String> nodes = new ArrayList<>(nodeMap.keySet());
|
||||
|
|
|
@ -57,6 +57,7 @@ import static org.hamcrest.CoreMatchers.is;
|
|||
import static org.hamcrest.CoreMatchers.notNullValue;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.isEmptyString;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assume.assumeThat;
|
||||
import static org.junit.Assume.assumeTrue;
|
||||
|
@@ -302,5 +303,26 @@ public abstract class ArchiveTestCase extends PackagingTestCase {
}
}

public void test90SecurityCliPackaging() {
assumeThat(installation, is(notNullValue()));

final Installation.Executables bin = installation.executables();
final Shell sh = new Shell();

if (distribution().equals(Distribution.DEFAULT_TAR) || distribution().equals(Distribution.DEFAULT_ZIP)) {
assertTrue(Files.exists(installation.lib.resolve("tools").resolve("security-cli")));
Platforms.onLinux(() -> {
final Result result = sh.run(bin.elasticsearchCertutil + " help");
assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));
});

Platforms.onWindows(() -> {
final Result result = sh.run(bin.elasticsearchCertutil + " help");
assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));
});
} else if (distribution().equals(Distribution.OSS_TAR) || distribution().equals(Distribution.OSS_ZIP)) {
assertFalse(Files.exists(installation.lib.resolve("tools").resolve("security-cli")));
}
}

}

@@ -101,6 +101,7 @@ public class Installation {
public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin");
public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore");
public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog");
public final Path elasticsearchCertutil = platformExecutable("elasticsearch-certutil");

private Path platformExecutable(String name) {
final String platformExecutableName = Platforms.WINDOWS

@@ -106,7 +106,7 @@ dependencies {
compile 'com.carrotsearch:hppc:0.7.1'

// time handling, remove with java 8 time
compile 'joda-time:joda-time:2.9.9'
compile 'joda-time:joda-time:2.10'

// percentiles aggregation
compile 'com.tdunning:t-digest:3.2'

@@ -0,0 +1 @@
f66c8125d1057ffce6c4e29e624cac863e110e2b

@@ -1 +0,0 @@
f7b520c458572890807d143670c9b24f4de90897
@ -32,6 +32,11 @@ public interface AliasesRequest extends IndicesRequest.Replaceable {
|
|||
*/
|
||||
String[] aliases();
|
||||
|
||||
/**
|
||||
* Returns the aliases as they were originally requested, before any potential name resolution
|
||||
*/
|
||||
String[] getOriginalAliases();
|
||||
|
||||
/**
|
||||
* Replaces current aliases with the provided aliases.
|
||||
*
|
||||
|
|
|
@ -48,7 +48,6 @@ public class ClusterGetSettingsResponse extends ActionResponse implements ToXCon
|
|||
static final String TRANSIENT_FIELD = "transient";
|
||||
static final String DEFAULTS_FIELD = "defaults";
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final ConstructingObjectParser<ClusterGetSettingsResponse, Void> PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"cluster_get_settings_response",
|
||||
|
|
|
@ -214,6 +214,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
private final AliasActions.Type type;
|
||||
private String[] indices;
|
||||
private String[] aliases = Strings.EMPTY_ARRAY;
|
||||
private String[] originalAliases = Strings.EMPTY_ARRAY;
|
||||
private String filter;
|
||||
private String routing;
|
||||
private String indexRouting;
|
||||
|
@ -238,6 +239,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
|
||||
writeIndex = in.readOptionalBoolean();
|
||||
}
|
||||
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
originalAliases = in.readStringArray();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -252,6 +256,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
|
||||
out.writeOptionalBoolean(writeIndex);
|
||||
}
|
||||
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
|
||||
out.writeStringArray(originalAliases);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -315,6 +322,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
}
|
||||
}
|
||||
this.aliases = aliases;
|
||||
this.originalAliases = aliases;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -329,6 +337,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
throw new IllegalArgumentException("[alias] can't be empty string");
|
||||
}
|
||||
this.aliases = new String[] {alias};
|
||||
this.originalAliases = aliases;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -432,6 +441,11 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
this.aliases = aliases;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] getOriginalAliases() {
|
||||
return originalAliases;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean expandAliasesWildcards() {
|
||||
//remove operations support wildcards among aliases, add operations don't
|
||||
|
@ -579,7 +593,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
|||
}, AliasActions.PARSER, new ParseField("actions"));
|
||||
}
|
||||
|
||||
public static IndicesAliasesRequest fromXContent(XContentParser parser) throws IOException {
|
||||
public static IndicesAliasesRequest fromXContent(XContentParser parser) {
|
||||
return PARSER.apply(parser, null);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -95,7 +95,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
|
|||
Set<String> aliases = new HashSet<>();
|
||||
for (AliasActions action : actions) {
|
||||
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices());
|
||||
Collections.addAll(aliases, action.aliases());
|
||||
Collections.addAll(aliases, action.getOriginalAliases());
|
||||
for (String index : concreteIndices) {
|
||||
switch (action.actionType()) {
|
||||
case ADD:
|
||||
|
@ -142,7 +142,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
|
|||
if (action.expandAliasesWildcards()) {
|
||||
//for DELETE we expand the aliases
|
||||
String[] indexAsArray = {concreteIndex};
|
||||
ImmutableOpenMap<String, List<AliasMetaData>> aliasMetaData = metaData.findAliases(action.aliases(), indexAsArray);
|
||||
ImmutableOpenMap<String, List<AliasMetaData>> aliasMetaData = metaData.findAliases(action, indexAsArray);
|
||||
List<String> finalAliases = new ArrayList<>();
|
||||
for (ObjectCursor<List<AliasMetaData>> curAliases : aliasMetaData.values()) {
|
||||
for (AliasMetaData aliasMeta: curAliases.value) {
|
||||
|
|
|
@ -63,7 +63,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
|
|||
@Override
|
||||
protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) {
|
||||
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
|
||||
ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request.aliases(), concreteIndices);
|
||||
ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request, concreteIndices);
|
||||
listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases)));
|
||||
}
|
||||
|
||||
|
|
|
@ -32,15 +32,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
|
|||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.metadata.MappingMetaData;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.IndexScopedSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsFilter;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.common.settings.IndexScopedSettings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
@ -110,7 +109,7 @@ public class TransportGetIndexAction extends TransportClusterInfoAction<GetIndex
|
|||
break;
|
||||
case ALIASES:
|
||||
if (!doneAliases) {
|
||||
aliasesResult = state.metaData().findAliases(Strings.EMPTY_ARRAY, concreteIndices);
|
||||
aliasesResult = state.metaData().findAllAliases(concreteIndices);
|
||||
doneAliases = true;
|
||||
}
|
||||
break;
|
||||
|
|
|
@ -45,7 +45,6 @@ public class QueryExplanation implements Streamable, ToXContentFragment {
|
|||
|
||||
public static final int RANDOM_SHARD = -1;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
static ConstructingObjectParser<QueryExplanation, Void> PARSER = new ConstructingObjectParser<>(
|
||||
"query_explanation",
|
||||
true,
|
||||
|
|
|
@ -129,7 +129,6 @@ public class GetResponse extends ActionResponse implements Iterable<DocumentFiel
|
|||
/**
|
||||
* The source of the document (As a map).
|
||||
*/
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public Map<String, Object> getSourceAsMap() throws ElasticsearchParseException {
|
||||
return getResult.sourceAsMap();
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
package org.elasticsearch.action.get;
|
||||
|
||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.TransportActions;
|
||||
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
|
||||
|
@ -90,9 +89,9 @@ public class TransportShardMultiGetAction extends TransportSingleShardAction<Mul
|
|||
GetResult getResult = indexShard.getService().get(item.type(), item.id(), item.storedFields(), request.realtime(), item.version(),
|
||||
item.versionType(), item.fetchSourceContext());
|
||||
response.add(request.locations.get(i), new GetResponse(getResult));
|
||||
} catch (Exception e) {
|
||||
} catch (RuntimeException e) {
|
||||
if (TransportActions.isShardNotAvailableException(e)) {
|
||||
throw (ElasticsearchException) e;
|
||||
throw e;
|
||||
} else {
|
||||
logger.debug(() -> new ParameterizedMessage("{} failed to execute multi_get for [{}]/[{}]", shardId,
|
||||
item.type(), item.id()), e);
|
||||
|
|
|
@ -32,8 +32,8 @@ import org.elasticsearch.ingest.IngestDocument;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
|
||||
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
|
||||
|
||||
public class SimulateProcessorResult implements Writeable, ToXContentObject {
|
||||
|
||||
|
@ -42,7 +42,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject {
|
|||
private final WriteableIngestDocument ingestDocument;
|
||||
private final Exception failure;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static final ConstructingObjectParser<ElasticsearchException, Void> IGNORED_ERROR_PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"ignored_error_parser",
|
||||
|
@ -57,7 +56,6 @@ public class SimulateProcessorResult implements Writeable, ToXContentObject {
|
|||
);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<SimulateProcessorResult, Void> PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"simulate_processor_result",
|
||||
|
|
|
@ -94,7 +94,6 @@ final class WriteableIngestDocument implements Writeable, ToXContentFragment {
|
|||
);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static final ConstructingObjectParser<WriteableIngestDocument, Void> PARSER =
|
||||
new ConstructingObjectParser<>(
|
||||
"writeable_ingest_document",
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.action.support;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.IntObjectCursor;
|
||||
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.routing.IndexRoutingTable;
|
||||
|
@ -205,7 +206,7 @@ public final class ActiveShardCount implements Writeable {
|
|||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
@SuppressWarnings("unchecked") ActiveShardCount that = (ActiveShardCount) o;
|
||||
ActiveShardCount that = (ActiveShardCount) o;
|
||||
return value == that.value;
|
||||
}
|
||||
|
||||
|
|
|
@ -72,7 +72,6 @@ public abstract class ReplicationRequestBuilder<Request extends ReplicationReque
|
|||
* shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
|
||||
* to get the ActiveShardCount.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public RequestBuilder setWaitForActiveShards(final int waitForActiveShards) {
|
||||
return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards));
|
||||
}
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
package org.elasticsearch.action.termvectors;
|
||||
|
||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.TransportActions;
|
||||
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
|
||||
|
@ -84,13 +83,13 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
|
|||
try {
|
||||
TermVectorsResponse termVectorsResponse = TermVectorsService.getTermVectors(indexShard, termVectorsRequest);
|
||||
response.add(request.locations.get(i), termVectorsResponse);
|
||||
} catch (Exception t) {
|
||||
if (TransportActions.isShardNotAvailableException(t)) {
|
||||
throw (ElasticsearchException) t;
|
||||
} catch (RuntimeException e) {
|
||||
if (TransportActions.isShardNotAvailableException(e)) {
|
||||
throw e;
|
||||
} else {
|
||||
logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), t);
|
||||
logger.debug(() -> new ParameterizedMessage("{} failed to execute multi term vectors for [{}]/[{}]", shardId, termVectorsRequest.type(), termVectorsRequest.id()), e);
|
||||
response.add(request.locations.get(i),
|
||||
new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t));
|
||||
new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.cluster;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.ClusterState.Custom;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenMap;
|
||||
|
@ -165,7 +166,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements
|
|||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
@SuppressWarnings("unchecked") Entry entry = (Entry) o;
|
||||
Entry entry = (Entry) o;
|
||||
return snapshot.equals(entry.snapshot) &&
|
||||
state == entry.state &&
|
||||
indices.equals(entry.indices) &&
|
||||
|
@ -291,7 +292,7 @@ public class RestoreInProgress extends AbstractNamedDiffable<Custom> implements
|
|||
return false;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked") ShardRestoreStatus status = (ShardRestoreStatus) o;
|
||||
ShardRestoreStatus status = (ShardRestoreStatus) o;
|
||||
return state == status.state &&
|
||||
Objects.equals(nodeId, status.nodeId) &&
|
||||
Objects.equals(reason, status.reason);
|
||||
|
|
|
@ -161,7 +161,6 @@ public final class IndexGraveyard implements MetaData.Custom {
|
|||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public Diff<MetaData.Custom> diff(final MetaData.Custom previous) {
|
||||
return new IndexGraveyardDiff((IndexGraveyard) previous, this);
|
||||
}
|
||||
|
@ -321,7 +320,7 @@ public final class IndexGraveyard implements MetaData.Custom {
|
|||
|
||||
@Override
|
||||
public IndexGraveyard apply(final MetaData.Custom previous) {
|
||||
@SuppressWarnings("unchecked") final IndexGraveyard old = (IndexGraveyard) previous;
|
||||
final IndexGraveyard old = (IndexGraveyard) previous;
|
||||
if (removedCount > old.tombstones.size()) {
|
||||
throw new IllegalStateException("IndexGraveyardDiff cannot remove [" + removedCount + "] entries from [" +
|
||||
old.tombstones.size() + "] tombstones.");
|
||||
|
@ -416,7 +415,7 @@ public final class IndexGraveyard implements MetaData.Custom {
|
|||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
@SuppressWarnings("unchecked") Tombstone that = (Tombstone) other;
|
||||
Tombstone that = (Tombstone) other;
|
||||
return index.equals(that.index) && deleteDateInMillis == that.deleteDateInMillis;
|
||||
}
|
||||
|
||||
|
|
|
@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList;
|
|||
import com.carrotsearch.hppc.cursors.IntObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
|
||||
import org.elasticsearch.action.support.ActiveShardCount;
|
||||
|
@ -685,7 +686,6 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragmen
|
|||
return lookupPrototypeSafe(key).readFrom(in);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
|
||||
return lookupPrototypeSafe(key).readDiffFrom(in);
|
||||
|
|
|
@ -381,7 +381,6 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
|
|||
aliases.build(), customs.build());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params)
|
||||
throws IOException {
|
||||
builder.startObject(indexTemplateMetaData.name());
|
||||
|
|
|
@ -22,8 +22,10 @@ package org.elasticsearch.cluster.metadata;
|
|||
import com.carrotsearch.hppc.ObjectHashSet;
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.lucene.util.CollectionUtil;
|
||||
import org.elasticsearch.action.AliasesRequest;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.ClusterState.FeatureAware;
|
||||
import org.elasticsearch.cluster.Diff;
|
||||
|
@ -168,7 +170,6 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
|
|||
|
||||
private final SortedMap<String, AliasOrIndex> aliasAndIndexLookup;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings,
|
||||
ImmutableOpenMap<String, IndexMetaData> indices, ImmutableOpenMap<String, IndexTemplateMetaData> templates,
|
||||
ImmutableOpenMap<String, Custom> customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices,
|
||||
|
@@ -248,21 +249,53 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
}

/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
* Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliases The names of the index aliases to find
* @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
* @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
* present for that index
*/
public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, String[] concreteIndices) {
public ImmutableOpenMap<String, List<AliasMetaData>> findAllAliases(String[] concreteIndices) {
return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices);
}

/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliasesRequest The request to find aliases for
* @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
* @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
* present for that index
*/
public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) {
return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices);
}

/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliases The aliases to look for
* @param originalAliases The original aliases that the user originally requested
* @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
* @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are
* present for that index
*/
private ImmutableOpenMap<String, List<AliasMetaData>> findAliases(String[] originalAliases, String[] aliases,
String[] concreteIndices) {
assert aliases != null;
assert originalAliases != null;
assert concreteIndices != null;
if (concreteIndices.length == 0) {
return ImmutableOpenMap.of();
}

//if aliases were provided but they got replaced with empty aliases, return empty map
if (originalAliases.length > 0 && aliases.length == 0) {
return ImmutableOpenMap.of();
}

boolean matchAllAliases = matchAllAliases(aliases);
ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
for (String index : concreteIndices) {
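
A simplified, standalone illustration of the guard introduced above (it is not the MetaData implementation, which also handles wildcard matching and returns an ImmutableOpenMap): keeping the originally requested alias names lets the lookup tell "aliases were requested but name resolution left none" apart from "no aliases were requested at all", so an expanded-to-nothing request no longer behaves like a match-all. The index and alias names below are invented.

import java.util.Collections;
import java.util.List;
import java.util.Map;

public class FindAliasesGuardSketch {

    // Mirrors the early returns added to MetaData#findAliases: empty concrete indices,
    // or originally requested aliases that resolved to nothing, both yield an empty
    // result instead of falling through to the "no aliases given == match all" path.
    static Map<String, List<String>> findAliases(String[] originalAliases, String[] aliases, String[] concreteIndices) {
        if (concreteIndices.length == 0) {
            return Collections.emptyMap();
        }
        // aliases were provided but they got replaced with empty aliases -> empty map
        if (originalAliases.length > 0 && aliases.length == 0) {
            return Collections.emptyMap();
        }
        // the real implementation walks the index metadata here
        return Collections.emptyMap();
    }

    public static void main(String[] args) {
        // "logs-*" was requested but resolved to nothing, so nothing is returned.
        System.out.println(findAliases(new String[] {"logs-*"}, new String[0], new String[] {"index-1"}));
    }
}
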
@ -967,7 +1000,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
|
|||
}
|
||||
|
||||
public IndexGraveyard indexGraveyard() {
|
||||
@SuppressWarnings("unchecked") IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE);
|
||||
IndexGraveyard graveyard = (IndexGraveyard) getCustom(IndexGraveyard.TYPE);
|
||||
return graveyard;
|
||||
}
|
||||
|
||||
|
|
|
@ -217,7 +217,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject {
|
|||
return false;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked") SnapshotRecoverySource that = (SnapshotRecoverySource) o;
|
||||
SnapshotRecoverySource that = (SnapshotRecoverySource) o;
|
||||
return snapshot.equals(that.snapshot) && index.equals(that.index) && version.equals(that.version);
|
||||
}
|
||||
|
||||
|
|
|
@ -175,7 +175,7 @@ public abstract class AbstractAllocationDecision implements ToXContentFragment,
|
|||
if (other == null || other instanceof AbstractAllocationDecision == false) {
|
||||
return false;
|
||||
}
|
||||
@SuppressWarnings("unchecked") AbstractAllocationDecision that = (AbstractAllocationDecision) other;
|
||||
AbstractAllocationDecision that = (AbstractAllocationDecision) other;
|
||||
return Objects.equals(targetNode, that.targetNode) && Objects.equals(nodeDecisions, that.nodeDecisions);
|
||||
}
|
||||
|
||||
|
|
|
@ -316,7 +316,7 @@ public class AllocateUnassignedDecision extends AbstractAllocationDecision {
|
|||
if (other instanceof AllocateUnassignedDecision == false) {
|
||||
return false;
|
||||
}
|
||||
@SuppressWarnings("unchecked") AllocateUnassignedDecision that = (AllocateUnassignedDecision) other;
|
||||
AllocateUnassignedDecision that = (AllocateUnassignedDecision) other;
|
||||
return Objects.equals(allocationStatus, that.allocationStatus)
|
||||
&& Objects.equals(allocationId, that.allocationId)
|
||||
&& reuseStore == that.reuseStore
|
||||
|
|
|
@ -300,7 +300,7 @@ public final class MoveDecision extends AbstractAllocationDecision {
|
|||
if (other instanceof MoveDecision == false) {
|
||||
return false;
|
||||
}
|
||||
@SuppressWarnings("unchecked") MoveDecision that = (MoveDecision) other;
|
||||
MoveDecision that = (MoveDecision) other;
|
||||
return Objects.equals(allocationDecision, that.allocationDecision)
|
||||
&& Objects.equals(canRemainDecision, that.canRemainDecision)
|
||||
&& Objects.equals(clusterRebalanceDecision, that.clusterRebalanceDecision)
|
||||
|
|
|
@ -54,7 +54,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple
|
|||
listeners.remove(listener);
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
@Override
|
||||
public void start() {
|
||||
if (!lifecycle.canMoveToStarted()) {
|
||||
|
@ -72,7 +71,6 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple
|
|||
|
||||
protected abstract void doStart();
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
@Override
|
||||
public void stop() {
|
||||
if (!lifecycle.canMoveToStopped()) {
|
||||
|
|
|
@ -32,7 +32,6 @@ class ConstructorInjectorStore {
|
|||
private final FailableCache<TypeLiteral<?>, ConstructorInjector<?>> cache
|
||||
= new FailableCache<TypeLiteral<?>, ConstructorInjector<?>>() {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
protected ConstructorInjector<?> create(TypeLiteral<?> type, Errors errors)
|
||||
throws ErrorsException {
|
||||
return createConstructor(type, errors);
|
||||
|
|
|
@ -101,7 +101,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor {
|
|||
},
|
||||
new TypeConverter() {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public Object convert(String value, TypeLiteral<?> toType) {
|
||||
try {
|
||||
return Class.forName(value);
|
||||
|
@ -128,7 +127,6 @@ class TypeConverterBindingProcessor extends AbstractProcessor {
|
|||
|
||||
TypeConverter typeConverter = new TypeConverter() {
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public Object convert(String value, TypeLiteral<?> toType) {
|
||||
try {
|
||||
return parser.invoke(null, value);
|
||||
|
|
|
@ -42,7 +42,6 @@ class AssistedConstructor<T> {
|
|||
private final ParameterListKey assistedParameters;
|
||||
private final List<Parameter> allParameters;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
AssistedConstructor(Constructor<T> constructor, List<TypeLiteral<?>> parameterTypes) {
|
||||
this.constructor = constructor;
|
||||
|
||||
|
|
|
@ -97,7 +97,7 @@ public class ProviderMethod<T> implements ProviderWithDependencies<T> {
|
|||
|
||||
try {
|
||||
// We know this cast is safe because T is the method's return type.
|
||||
@SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
|
||||
@SuppressWarnings({"unchecked"})
|
||||
T result = (T) method.invoke(instance, parameters);
|
||||
return result;
|
||||
} catch (IllegalAccessException e) {
|
||||
|
|
|
@ -220,7 +220,6 @@ public abstract class Multibinder<T> {
|
|||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void configure(Binder binder) {
|
||||
checkConfiguration(!isInitialized(), "Multibinder was already initialized");
|
||||
|
||||
|
|
|
@ -78,8 +78,7 @@ public abstract class DefaultBindingTargetVisitor<T, V> implements BindingTarget
|
|||
|
||||
// javac says it's an error to cast ProviderBinding<? extends T> to Binding<? extends T>
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public V visit(ProviderBinding<? extends T> providerBinding) {
|
||||
return visitOther((Binding<? extends T>) providerBinding);
|
||||
return visitOther(providerBinding);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -518,7 +518,6 @@ public abstract class StreamInput extends InputStream {
|
|||
return (Map<String, Object>) readGenericValue();
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
@Nullable
|
||||
public Object readGenericValue() throws IOException {
|
||||
byte type = readByte();
|
||||
|
|
|
@ -178,7 +178,6 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
|
|||
/**
|
||||
* The source of the document (As a map).
|
||||
*/
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public Map<String, Object> sourceAsMap() throws ElasticsearchParseException {
|
||||
if (source == null) {
|
||||
return null;
|
||||
|
|
|
@ -197,7 +197,6 @@ public class DocumentMapper implements ToXContentFragment {
|
|||
return mapping.root;
|
||||
}
|
||||
|
||||
@SuppressWarnings({"unchecked"})
|
||||
public <T extends MetadataFieldMapper> T metadataMapper(Class<T> type) {
|
||||
return mapping.metadataMapper(type);
|
||||
}
|
||||
|
|
|
@ -427,7 +427,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
|
|||
// the master node restoring mappings from disk or data nodes
|
||||
// deserializing cluster state that was sent by the master node,
|
||||
// this check will be skipped.
|
||||
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size());
|
||||
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() + fieldAliasMappers.size());
|
||||
}
|
||||
|
||||
results.put(newMapper.type(), newMapper);
|
||||
|
|
|
@ -230,7 +230,9 @@ public class TypeParsers {
|
|||
} else {
|
||||
throw new MapperParsingException("no type specified for property [" + multiFieldName + "]");
|
||||
}
|
||||
if (type.equals(ObjectMapper.CONTENT_TYPE) || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) {
|
||||
if (type.equals(ObjectMapper.CONTENT_TYPE)
|
||||
|| type.equals(ObjectMapper.NESTED_CONTENT_TYPE)
|
||||
|| type.equals(FieldAliasMapper.CONTENT_TYPE)) {
|
||||
throw new MapperParsingException("Type [" + type + "] cannot be used in multi field");
|
||||
}
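
For illustration only, a mapping of the kind the added check now rejects: a field alias nested under another field's "fields" section. The index and field names are invented; the error text is the one thrown in the hunk above.

import org.elasticsearch.client.Request;

public class AliasInMultiFieldSketch {

    // Submitting this mapping would now fail with
    // "Type [alias] cannot be used in multi field".
    static Request rejectedMapping() {
        Request request = new Request("PUT", "/my-index");
        request.setJsonEntity(
            "{\n" +
            "  \"mappings\": {\n" +
            "    \"doc\": {\n" +
            "      \"properties\": {\n" +
            "        \"title\": {\n" +
            "          \"type\": \"text\",\n" +
            "          \"fields\": {\n" +
            "            \"headline\": { \"type\": \"alias\", \"path\": \"title\" }\n" +
            "          }\n" +
            "        }\n" +
            "      }\n" +
            "    }\n" +
            "  }\n" +
            "}");
        return request;
    }

    public static void main(String[] args) {
        System.out.println(rejectedMapping().getEndpoint());
    }
}
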
|
||||
|
||||
|
|
|
@@ -37,6 +37,7 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;

/**
* CircuitBreakerService that attempts to redistribute space between breakers

@@ -215,7 +216,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
}
// Manually add the parent breaker settings since they aren't part of the breaker map
allStats.add(new CircuitBreakerStats(CircuitBreaker.PARENT, parentSettings.getLimit(),
parentUsed(0L), 1.0, parentTripCount.get()));
parentUsed(0L).totalUsage, 1.0, parentTripCount.get()));
return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()]));
}

@@ -225,15 +226,26 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
return new CircuitBreakerStats(breaker.getName(), breaker.getLimit(), breaker.getUsed(), breaker.getOverhead(), breaker.getTrippedCount());
}

private long parentUsed(long newBytesReserved) {
private static class ParentMemoryUsage {
final long baseUsage;
final long totalUsage;

ParentMemoryUsage(final long baseUsage, final long totalUsage) {
this.baseUsage = baseUsage;
this.totalUsage = totalUsage;
}
}

private ParentMemoryUsage parentUsed(long newBytesReserved) {
if (this.trackRealMemoryUsage) {
return currentMemoryUsage() + newBytesReserved;
final long current = currentMemoryUsage();
return new ParentMemoryUsage(current, current + newBytesReserved);
} else {
long parentEstimated = 0;
for (CircuitBreaker breaker : this.breakers.values()) {
parentEstimated += breaker.getUsed() * breaker.getOverhead();
}
return parentEstimated;
return new ParentMemoryUsage(parentEstimated, parentEstimated);
}
}

@@ -246,15 +258,37 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
* Checks whether the parent breaker has been tripped
*/
public void checkParentLimit(long newBytesReserved, String label) throws CircuitBreakingException {
long totalUsed = parentUsed(newBytesReserved);
final ParentMemoryUsage parentUsed = parentUsed(newBytesReserved);
long parentLimit = this.parentSettings.getLimit();
if (totalUsed > parentLimit) {
if (parentUsed.totalUsage > parentLimit) {
this.parentTripCount.incrementAndGet();
final String message = "[parent] Data too large, data for [" + label + "]" +
" would be [" + totalUsed + "/" + new ByteSizeValue(totalUsed) + "]" +
final StringBuilder message = new StringBuilder("[parent] Data too large, data for [" + label + "]" +
" would be [" + parentUsed.totalUsage + "/" + new ByteSizeValue(parentUsed.totalUsage) + "]" +
", which is larger than the limit of [" +
parentLimit + "/" + new ByteSizeValue(parentLimit) + "]";
throw new CircuitBreakingException(message, totalUsed, parentLimit);
parentLimit + "/" + new ByteSizeValue(parentLimit) + "]");
if (this.trackRealMemoryUsage) {
final long realUsage = parentUsed.baseUsage;
message.append(", real usage: [");
message.append(realUsage);
message.append("/");
message.append(new ByteSizeValue(realUsage));
message.append("], new bytes reserved: [");
message.append(newBytesReserved);
message.append("/");
message.append(new ByteSizeValue(newBytesReserved));
message.append("]");
} else {
message.append(", usages [");
message.append(String.join(", ",
this.breakers.entrySet().stream().map(e -> {
final CircuitBreaker breaker = e.getValue();
final long breakerUsed = (long)(breaker.getUsed() * breaker.getOverhead());
return e.getKey() + "=" + breakerUsed + "/" + new ByteSizeValue(breakerUsed);
})
.collect(Collectors.toList())));
message.append("]");
}
throw new CircuitBreakingException(message.toString(), parentUsed.totalUsage, parentLimit);
}
}
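
A standalone illustration of the per-breaker breakdown that the else branch above appends when real-memory tracking is disabled; the real code also renders each value as a human-readable ByteSizeValue, and the real-memory branch reports "real usage" and "new bytes reserved" instead. Breaker names and figures here are made up.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class ParentBreakerMessageSketch {

    public static void main(String[] args) {
        // Estimated usage per child breaker, as the non-real-memory path derives it
        // from getUsed() * getOverhead(); the numbers are invented for the example.
        Map<String, Long> estimatedUsePerBreaker = new LinkedHashMap<>();
        estimatedUsePerBreaker.put("request", 16440L);
        estimatedUsePerBreaker.put("fielddata", 1040L);
        estimatedUsePerBreaker.put("in_flight_requests", 0L);

        String usages = estimatedUsePerBreaker.entrySet().stream()
            .map(e -> e.getKey() + "=" + e.getValue())
            .collect(Collectors.joining(", "));

        // Prints something like: usages [request=16440, fielddata=1040, in_flight_requests=0]
        System.out.println("usages [" + usages + "]");
    }
}
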
|
||||
|
||||
|
|
|
@ -45,7 +45,6 @@ public class NodePersistentTasksExecutor {
|
|||
task.markAsFailed(e);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
protected void doRun() throws Exception {
|
||||
try {
|
||||
|
|