Merge remote-tracking branch 'es/master' into ccr

* es/master: (23 commits)
  Switch full-cluster-restart to new style Requests (#32140)
  [DOCS] Clarified that you must remove X-Pack plugin when upgrading from pre-6.3. (#32016)
  Remove BouncyCastle dependency from runtime (#32193)
  INGEST: Extend KV Processor (#31789) (#32232)
  INGEST: Make a few Processors callable by Painless (#32170)
  Add region ISO code to GeoIP Ingest plugin (#31669)
  [Tests] Remove QueryStringQueryBuilderTests#toQuery class assertions (#32236)
  Make sure that field aliases count towards the total fields limit. (#32222)
  Switch rolling restart to new style Requests (#32147)
  muting failing test for internal auto date histogram to avoid failure before fix is merged
  MINOR: Remove unused `IndexDynamicSettings` (#32237)
  Fix multi level nested sort (#32204)
  Enhance Parent circuit breaker error message (#32056)
  [ML] Use default request durability for .ml-state index (#32233)
  Remove indices stats timeout from monitoring docs
  Rename ranking evaluation response section (#32166)
  Dependencies: Upgrade to joda time 2.10 (#32160)
  Remove aliases resolution limitations when security is enabled (#31952)
  Ensure that field aliases cannot be used in multi-fields. (#32219)
  TESTS: Check for Netty resource leaks (#31861)
  ...
Commit 8ed3624f47 by Martijn van Groningen, 2018-07-21 09:06:13 +02:00
114 changed files with 2209 additions and 755 deletions

View File

@ -25,8 +25,8 @@ plugins {
group = 'org.elasticsearch.gradle'
-if (GradleVersion.current() < GradleVersion.version('3.3')) {
-throw new GradleException('Gradle 3.3+ is required to build elasticsearch')
+if (GradleVersion.current() < GradleVersion.version('4.9')) {
+throw new GradleException('Gradle 4.9+ is required to build elasticsearch')
}
if (JavaVersion.current() < JavaVersion.VERSION_1_8) {

View File

@ -67,6 +67,9 @@ class BuildPlugin implements Plugin<Project> {
+ 'elasticearch.standalone-rest-test, and elasticsearch.build '
+ 'are mutually exclusive')
}
if (GradleVersion.current() < GradleVersion.version('4.9')) {
throw new GradleException('Gradle 4.9+ is required to use elasticsearch.build plugin')
}
project.pluginManager.apply('java')
project.pluginManager.apply('carrotsearch.randomized-testing')
// these plugins add lots of info to our jars

View File

@ -40,7 +40,7 @@ import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
+import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
public class RankEvalIT extends ESRestHighLevelClientTestCase {
@ -84,7 +84,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
Map<String, EvalQueryQuality> partialResults = response.getPartialResults();
assertEquals(2, partialResults.size());
EvalQueryQuality amsterdamQueryQuality = partialResults.get("amsterdam_query");
-assertEquals(2, filterUnknownDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
+assertEquals(2, filterUnratedDocuments(amsterdamQueryQuality.getHitsAndRatings()).size());
List<RatedSearchHit> hitsAndRatings = amsterdamQueryQuality.getHitsAndRatings();
assertEquals(7, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {
@ -96,7 +96,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
}
}
EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query");
-assertEquals(6, filterUnknownDocuments(berlinQueryQuality.getHitsAndRatings()).size());
+assertEquals(6, filterUnratedDocuments(berlinQueryQuality.getHitsAndRatings()).size());
hitsAndRatings = berlinQueryQuality.getHitsAndRatings();
assertEquals(7, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {

View File

@ -49,7 +49,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os
return copySpec {
into("elasticsearch-${version}") {
into('lib') {
-with libFiles
+with libFiles(oss)
}
into('config') {
dirMode 0750

View File

@ -227,7 +227,8 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
/*****************************************************************************
* Common files in all distributions *
*****************************************************************************/
-libFiles = copySpec {
+libFiles = { oss ->
+copySpec {
// delay by using closures, since they have not yet been configured, so no jar task exists yet
from { project(':server').jar }
from { project(':server').configurations.runtime }
@ -238,6 +239,13 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
from { project(':distribution:tools:plugin-cli').jar }
from { project(':distribution:tools:plugin-cli').configurations.runtime }
}
if (oss == false) {
into('tools/security-cli') {
from { project(':x-pack:plugin:security:cli').jar }
from { project(':x-pack:plugin:security:cli').configurations.compile }
}
}
}
}
modulesFiles = { oss ->

View File

@ -126,7 +126,7 @@ Closure commonPackageConfig(String type, boolean oss) {
}
into('lib') {
with copySpec {
-with libFiles
+with libFiles(oss)
// we need to specify every intermediate directory so we iterate through the parents; duplicate calls with the same part are fine
eachFile { FileCopyDetails fcp ->
String[] segments = fcp.relativePath.segments

View File

@ -26,14 +26,14 @@ include::install_remove.asciidoc[]
| `field` | yes | - | The field to get the ip address from for the geographical lookup.
| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database.
| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb, GeoLite2-Country.mmdb and GeoLite2-ASN.mmdb files.
-| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
+| `properties` | no | [`continent_name`, `country_iso_code`, `region_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document
|======
*Depends on what is available in `database_file`:
* If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`,
-`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
+`country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`.
* If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`,
`country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties
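
For illustration, a pipeline that requests the new `region_iso_code` property explicitly might be configured as follows (a minimal sketch; the processor body and the field name `client_ip` are invented for this example):

{
  "geoip": {
    "field": "client_ip",
    "properties": ["region_iso_code", "region_name"]
  }
}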

View File

@ -1732,6 +1732,10 @@ For example, if you have a log message which contains `ip=1.2.3.4 error=REFUSED`
| `include_keys` | no | `null` | List of keys to filter and insert into document. Defaults to including all keys
| `exclude_keys` | no | `null` | List of keys to exclude from document
| `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document
| `prefix` | no | `null` | Prefix to be added to extracted keys
| `trim_key` | no | `null` | String of characters to trim from extracted keys
| `trim_value` | no | `null` | String of characters to trim from extracted values
| `strip_brackets` | no | `false` | If `true` strip brackets `()`, `<>`, `[]` as well as quotes `'` and `"` from extracted values
|======
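
As a sketch of how the new options compose (field name and option values invented for illustration), a `kv` processor could be configured as:

{
  "kv": {
    "field": "message",
    "field_split": " ",
    "value_split": "=",
    "prefix": "arg_",
    "trim_key": " ",
    "trim_value": " ",
    "strip_brackets": true
  }
}

With this configuration a value such as `<1.2.3.4>` would be extracted as `1.2.3.4`, and each extracted key would be stored under `arg_<key>`.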

View File

@ -74,7 +74,7 @@ field alias to query over multiple target fields in a single clause.
==== Unsupported APIs
Writes to field aliases are not supported: attempting to use an alias in an index or update request
-will result in a failure. Likewise, aliases cannot be used as the target of `copy_to`.
+will result in a failure. Likewise, aliases cannot be used as the target of `copy_to` or in multi-fields.
Because alias names are not present in the document source, aliases cannot be used when performing
source filtering. For example, the following request will return an empty result for `_source`:

View File

@ -79,3 +79,11 @@ the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in
==== The deprecated stored script contexts have now been removed
When putting stored scripts, support for storing them with the deprecated `template` context or without a context is
now removed. Scripts must be stored using the `script` context as mentioned in the documentation.
==== Get Aliases API limitations when {security} is enabled removed
The behavior and response codes of the get aliases API no longer vary
depending on whether {security} is enabled. Previously a
404 - NOT FOUND (IndexNotFoundException) could be returned in case the
current user was not authorized for any alias. An empty response with
status 200 - OK is now returned instead at all times.
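
For example (alias name invented for illustration), a request such as

GET /_alias/logs_alias

issued by a user who is authorized for no matching alias now returns 200 - OK with an empty body, `{}`, where previously it could fail with 404 - NOT FOUND.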

View File

@ -274,7 +274,7 @@ that shows potential errors of individual queries. The response has the followin
"details": {
"my_query_id1": { <2>
"quality_level": 0.6, <3>
"unknown_docs": [ <4>
"unrated_docs": [ <4>
{
"_index": "my_index",
"_id": "1960795"
@ -309,7 +309,7 @@ that shows potential errors of individual queries. The response has the followin
<1> the overall evaluation quality calculated by the defined metric
<2> the `details` section contains one entry for every query in the original `requests` section, keyed by the search request id
<3> the `quality_level` in the `details` section shows the contribution of this query to the global quality score
-<4> the `unknown_docs` section contains an `_index` and `_id` entry for each document in the search result for this
+<4> the `unrated_docs` section contains an `_index` and `_id` entry for each document in the search result for this
query that didn't have a ratings value. This can be used to ask the user to supply ratings for these documents
<5> the `hits` section shows a grouping of the search results with their supplied rating
<6> the `metric_details` give additional information about the calculated quality metric (e.g. how many of the retrieved

View File

@ -85,10 +85,6 @@ You can update this setting through the
-Sets the timeout for collecting index statistics. Defaults to `10s`.
-`xpack.monitoring.collection.indices.stats.timeout`::
-Sets the timeout for collecting total indices statistics. Defaults to `10s`.
`xpack.monitoring.collection.index.recovery.active_only`::
Controls whether or not all recoveries are collected. Set to `true` to

View File

@ -47,6 +47,8 @@ include::set-paths-tip.asciidoc[]
Use the `elasticsearch-plugin` script to install the upgraded version of each
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
a node.
+
include::remove-xpack.asciidoc[]
. *Start each upgraded node.*
+

View File

@ -0,0 +1,4 @@
IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3,
remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As
of 6.3, {xpack} is included in the default distribution. The node will fail to
start if the old {xpack} plugin is present.

View File

@ -53,6 +53,8 @@ include::set-paths-tip.asciidoc[]
Use the `elasticsearch-plugin` script to install the upgraded version of each
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
a node.
+
include::remove-xpack.asciidoc[]
. *Start the upgraded node.*
+

View File

@ -20,11 +20,17 @@
esplugin {
description 'Module for ingest processors that do not require additional security permissions or have large dependencies and resources'
classname 'org.elasticsearch.ingest.common.IngestCommonPlugin'
extendedPlugins = ['lang-painless']
}
dependencies {
compileOnly project(':modules:lang-painless')
compile project(':libs:grok')
}
compileJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
compileTestJava.options.compilerArgs << "-Xlint:-unchecked,-rawtypes"
integTestCluster {
module project(':modules:lang-painless')
}

View File

@ -35,9 +35,13 @@ public final class BytesProcessor extends AbstractStringProcessor {
super(processorTag, field, ignoreMissing, targetField);
}
+public static long apply(String value) {
+return ByteSizeValue.parseBytesSizeValue(value, null, "Ingest Field").getBytes();
+}
@Override
protected Long process(String value) {
-return ByteSizeValue.parseBytesSizeValue(value, null, getField()).getBytes();
+return apply(value);
}
@Override

View File

@ -67,10 +67,8 @@ public final class JsonProcessor extends AbstractProcessor {
return addToRoot;
}
-@Override
-public void execute(IngestDocument document) throws Exception {
-Object fieldValue = document.getFieldValue(field, Object.class);
-BytesReference bytesRef = (fieldValue == null) ? new BytesArray("null") : new BytesArray(fieldValue.toString());
+public static Object apply(Object fieldValue) {
+BytesReference bytesRef = fieldValue == null ? new BytesArray("null") : new BytesArray(fieldValue.toString());
try (InputStream stream = bytesRef.streamInput();
XContentParser parser = JsonXContent.jsonXContent
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) {
@ -91,20 +89,32 @@ public final class JsonProcessor extends AbstractProcessor {
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
throw new IllegalArgumentException("cannot read binary value");
}
-if (addToRoot && (value instanceof Map)) {
-for (Map.Entry<String, Object> entry : ((Map<String, Object>) value).entrySet()) {
-document.setFieldValue(entry.getKey(), entry.getValue());
-}
-} else if (addToRoot) {
-throw new IllegalArgumentException("cannot add non-map fields to root of document");
-} else {
-document.setFieldValue(targetField, value);
-}
+return value;
} catch (IOException e) {
throw new IllegalArgumentException(e);
}
}
+public static void apply(Map<String, Object> ctx, String fieldName) {
+Object value = apply(ctx.get(fieldName));
+if (value instanceof Map) {
+@SuppressWarnings("unchecked")
+Map<String, Object> map = (Map<String, Object>) value;
+ctx.putAll(map);
+} else {
+throw new IllegalArgumentException("cannot add non-map fields to root of document");
+}
+}
+@Override
+public void execute(IngestDocument document) throws Exception {
+if (addToRoot) {
+apply(document.getSourceAndMetadata(), field);
+} else {
+document.setFieldValue(targetField, apply(document.getFieldValue(field, Object.class)));
+}
+}
@Override
public String getType() {
return TYPE;

View File

@ -25,11 +25,14 @@ import org.elasticsearch.ingest.ConfigurationUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.regex.Pattern;
/**
* The KeyValueProcessor parses and extracts messages of the `key=value` variety into fields with values of the keys.
@ -38,6 +41,8 @@ public final class KeyValueProcessor extends AbstractProcessor {
public static final String TYPE = "kv";
private static final Pattern STRIP_BRACKETS = Pattern.compile("(^[\\(\\[<\"'])|([\\]\\)>\"']$)");
private final String field;
private final String fieldSplit;
private final String valueSplit;
@ -45,9 +50,11 @@ public final class KeyValueProcessor extends AbstractProcessor {
private final Set<String> excludeKeys;
private final String targetField;
private final boolean ignoreMissing;
private final Consumer<IngestDocument> execution;
KeyValueProcessor(String tag, String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
-Set<String> excludeKeys, String targetField, boolean ignoreMissing) {
+Set<String> excludeKeys, String targetField, boolean ignoreMissing,
+String trimKey, String trimValue, boolean stripBrackets, String prefix) {
super(tag);
this.field = field;
this.targetField = targetField;
@ -56,6 +63,92 @@ public final class KeyValueProcessor extends AbstractProcessor {
this.includeKeys = includeKeys;
this.excludeKeys = excludeKeys;
this.ignoreMissing = ignoreMissing;
this.execution = buildExecution(
fieldSplit, valueSplit, field, includeKeys, excludeKeys, targetField, ignoreMissing, trimKey, trimValue,
stripBrackets, prefix
);
}
private static Consumer<IngestDocument> buildExecution(String fieldSplit, String valueSplit, String field,
Set<String> includeKeys, Set<String> excludeKeys,
String targetField, boolean ignoreMissing,
String trimKey, String trimValue, boolean stripBrackets,
String prefix) {
final Predicate<String> keyFilter;
if (includeKeys == null) {
if (excludeKeys == null) {
keyFilter = key -> true;
} else {
keyFilter = key -> excludeKeys.contains(key) == false;
}
} else {
if (excludeKeys == null) {
keyFilter = includeKeys::contains;
} else {
keyFilter = key -> includeKeys.contains(key) && excludeKeys.contains(key) == false;
}
}
final String fieldPathPrefix;
String keyPrefix = prefix == null ? "" : prefix;
if (targetField == null) {
fieldPathPrefix = keyPrefix;
} else {
fieldPathPrefix = targetField + "." + keyPrefix;
}
final Function<String, String> keyPrefixer;
if (fieldPathPrefix.isEmpty()) {
keyPrefixer = val -> val;
} else {
keyPrefixer = val -> fieldPathPrefix + val;
}
final Function<String, String[]> fieldSplitter = buildSplitter(fieldSplit, true);
Function<String, String[]> valueSplitter = buildSplitter(valueSplit, false);
final Function<String, String> keyTrimmer = buildTrimmer(trimKey);
final Function<String, String> bracketStrip;
if (stripBrackets) {
bracketStrip = val -> STRIP_BRACKETS.matcher(val).replaceAll("");
} else {
bracketStrip = val -> val;
}
final Function<String, String> valueTrimmer = buildTrimmer(trimValue);
return document -> {
String value = document.getFieldValue(field, String.class, ignoreMissing);
if (value == null) {
if (ignoreMissing) {
return;
}
throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
}
for (String part : fieldSplitter.apply(value)) {
String[] kv = valueSplitter.apply(part);
if (kv.length != 2) {
throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
}
String key = keyTrimmer.apply(kv[0]);
if (keyFilter.test(key)) {
append(document, keyPrefixer.apply(key), valueTrimmer.apply(bracketStrip.apply(kv[1])));
}
}
};
}
private static Function<String, String> buildTrimmer(String trim) {
if (trim == null) {
return val -> val;
} else {
Pattern pattern = Pattern.compile("(^([" + trim + "]+))|([" + trim + "]+$)");
return val -> pattern.matcher(val).replaceAll("");
}
}
private static Function<String, String[]> buildSplitter(String split, boolean fields) {
int limit = fields ? 0 : 2;
if (split.length() > 2 || split.length() == 2 && split.charAt(0) != '\\') {
Pattern splitPattern = Pattern.compile(split);
return val -> splitPattern.split(val, limit);
} else {
return val -> val.split(split, limit);
}
}
String getField() {
@ -86,7 +179,7 @@ public final class KeyValueProcessor extends AbstractProcessor {
return ignoreMissing;
}
-public void append(IngestDocument document, String targetField, String value) {
+private static void append(IngestDocument document, String targetField, String value) {
if (document.hasField(targetField)) {
document.appendFieldValue(targetField, value);
} else {
@ -96,27 +189,7 @@ public final class KeyValueProcessor extends AbstractProcessor {
@Override
public void execute(IngestDocument document) {
-String oldVal = document.getFieldValue(field, String.class, ignoreMissing);
-if (oldVal == null && ignoreMissing) {
-return;
-} else if (oldVal == null) {
-throw new IllegalArgumentException("field [" + field + "] is null, cannot extract key-value pairs.");
-}
-String fieldPathPrefix = (targetField == null) ? "" : targetField + ".";
-Arrays.stream(oldVal.split(fieldSplit))
-.map((f) -> {
-String[] kv = f.split(valueSplit, 2);
-if (kv.length != 2) {
-throw new IllegalArgumentException("field [" + field + "] does not contain value_split [" + valueSplit + "]");
-}
-return kv;
-})
-.filter((p) ->
-(includeKeys == null || includeKeys.contains(p[0])) &&
-(excludeKeys == null || excludeKeys.contains(p[0]) == false))
-.forEach((p) -> append(document, fieldPathPrefix + p[0], p[1]));
+execution.accept(document);
}
@Override
@ -132,6 +205,11 @@ public final class KeyValueProcessor extends AbstractProcessor {
String targetField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "target_field");
String fieldSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field_split");
String valueSplit = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "value_split");
String trimKey = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_key");
String trimValue = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "trim_value");
String prefix = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "prefix");
boolean stripBrackets =
ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "strip_brackets", false);
Set<String> includeKeys = null;
Set<String> excludeKeys = null;
List<String> includeKeysList = ConfigurationUtils.readOptionalList(TYPE, processorTag, config, "include_keys");
@ -143,7 +221,10 @@ public final class KeyValueProcessor extends AbstractProcessor {
excludeKeys = Collections.unmodifiableSet(Sets.newHashSet(excludeKeysList));
}
boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
-return new KeyValueProcessor(processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing);
+return new KeyValueProcessor(
+processorTag, field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing,
+trimKey, trimValue, stripBrackets, prefix
+);
}
}
}

View File

@ -35,9 +35,13 @@ public final class LowercaseProcessor extends AbstractStringProcessor {
super(processorTag, field, ignoreMissing, targetField);
}
+public static String apply(String value) {
+return value.toLowerCase(Locale.ROOT);
+}
@Override
protected String process(String value) {
-return value.toLowerCase(Locale.ROOT);
+return apply(value);
}
@Override

View File

@ -17,23 +17,33 @@
* under the License.
*/
-package org.elasticsearch.index.settings;
+package org.elasticsearch.ingest.common;
-import org.elasticsearch.common.inject.BindingAnnotation;
+import java.util.Map;
-import java.lang.annotation.Documented;
-import java.lang.annotation.Retention;
-import java.lang.annotation.Target;
-import static java.lang.annotation.ElementType.FIELD;
-import static java.lang.annotation.ElementType.PARAMETER;
-import static java.lang.annotation.RetentionPolicy.RUNTIME;
-@BindingAnnotation
-@Target({FIELD, PARAMETER})
-@Retention(RUNTIME)
-@Documented
-public @interface IndexDynamicSettings {
+public final class Processors {
+public static long bytes(String value) {
+return BytesProcessor.apply(value);
+}
+public static String lowercase(String value) {
+return LowercaseProcessor.apply(value);
+}
+public static String uppercase(String value) {
+return UppercaseProcessor.apply(value);
+}
+public static Object json(Object fieldValue) {
+return JsonProcessor.apply(fieldValue);
+}
+public static void json(Map<String, Object> ctx, String field) {
+JsonProcessor.apply(ctx, field);
+}
+public static String urlDecode(String value) {
+return URLDecodeProcessor.apply(value);
+}
+}

View File

@ -0,0 +1,41 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest.common;
import org.elasticsearch.painless.spi.PainlessExtension;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.IngestScript;
import org.elasticsearch.script.ScriptContext;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class ProcessorsWhitelistExtension implements PainlessExtension {
private static final Whitelist WHITELIST =
WhitelistLoader.loadFromResourceFiles(ProcessorsWhitelistExtension.class, "processors_whitelist.txt");
@Override
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
return Collections.singletonMap(IngestScript.CONTEXT, Collections.singletonList(WHITELIST));
}
}

View File

@ -34,15 +34,19 @@ public final class URLDecodeProcessor extends AbstractStringProcessor {
super(processorTag, field, ignoreMissing, targetField);
}
-@Override
-protected String process(String value) {
+public static String apply(String value) {
try {
return URLDecoder.decode(value, "UTF-8");
} catch (UnsupportedEncodingException e) {
-throw new IllegalArgumentException("could not URL-decode field[" + getField() + "]", e);
+throw new IllegalArgumentException("Could not URL-decode value.", e);
}
}
+@Override
+protected String process(String value) {
+return apply(value);
+}
@Override
public String getType() {
return TYPE;

View File

@ -34,9 +34,13 @@ public final class UppercaseProcessor extends AbstractStringProcessor {
super(processorTag, field, ignoreMissing, targetField);
}
+public static String apply(String value) {
+return value.toUpperCase(Locale.ROOT);
+}
@Override
protected String process(String value) {
-return value.toUpperCase(Locale.ROOT);
+return apply(value);
}
@Override

View File

@ -0,0 +1 @@
org.elasticsearch.ingest.common.ProcessorsWhitelistExtension

View File

@ -0,0 +1,29 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# This file contains a whitelist of static processor methods that can be accessed from painless
class org.elasticsearch.ingest.common.Processors {
long bytes(String)
String lowercase(String)
String uppercase(String)
Object json(Object)
void json(Map, String)
String urlDecode(String)
}
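
In a Painless ingest script these whitelisted statics are called directly on `Processors`; a minimal sketch mirroring the YAML tests below (field names invented):

"script": {
  "lang": "painless",
  "source": "ctx.size_in_bytes = Processors.bytes(ctx.size)"
}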

View File

@ -63,7 +63,7 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase {
Processor processor = newProcessor(fieldName, randomBoolean(), fieldName);
ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> processor.execute(ingestDocument));
assertThat(exception.getMessage(),
CoreMatchers.equalTo("failed to parse setting [" + fieldName + "] with value [8912pb] as a size in bytes"));
CoreMatchers.equalTo("failed to parse setting [Ingest Field] with value [8912pb] as a size in bytes"));
assertThat(exception.getCause().getMessage(),
CoreMatchers.containsString("Values greater than 9223372036854775807 bytes are not supported"));
}
@ -93,6 +93,6 @@ public class BytesProcessorTests extends AbstractStringProcessorTestCase {
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(fieldName, expectedResultType()), equalTo(1126L));
assertWarnings("Fractional bytes values are deprecated. Use non-fractional bytes values instead: [1.1kb] found for setting " +
"[" + fieldName + "]");
"[Ingest Field]");
}
}

View File

@ -25,19 +25,25 @@ import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
import static org.hamcrest.Matchers.equalTo;
public class KeyValueProcessorTests extends ESTestCase {
private static final KeyValueProcessor.Factory FACTORY = new KeyValueProcessor.Factory();
public void test() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false);
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
@ -46,7 +52,7 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testRootTarget() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
ingestDocument.setFieldValue("myField", "first=hello&second=world&second=universe");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "myField", "&", "=", null, null,null, false);
Processor processor = createKvProcessor("myField", "&", "=", null, null,null, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("first", String.class), equalTo("hello"));
assertThat(ingestDocument.getFieldValue("second", List.class), equalTo(Arrays.asList("world", "universe")));
@ -55,7 +61,7 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testKeySameAsSourceField() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
ingestDocument.setFieldValue("first", "first=hello");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "first", "&", "=", null, null,null, false);
Processor processor = createKvProcessor("first", "&", "=", null, null,null, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("first", List.class), equalTo(Arrays.asList("first=hello", "hello")));
}
@ -63,7 +69,7 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testIncludeKeys() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
Processor processor = createKvProcessor(fieldName, "&", "=",
Sets.newHashSet("first"), null, "target", false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
@ -73,7 +79,7 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testExcludeKeys() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
Processor processor = createKvProcessor(fieldName, "&", "=",
null, Sets.newHashSet("second"), "target", false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
@ -84,7 +90,7 @@ public class KeyValueProcessorTests extends ESTestCase {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
"first=hello&second=world&second=universe&third=bar");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=",
Processor processor = createKvProcessor(fieldName, "&", "=",
Sets.newHashSet("first", "second"), Sets.newHashSet("first", "second"), "target", false);
processor.execute(ingestDocument);
assertFalse(ingestDocument.hasField("target.first"));
@ -92,9 +98,9 @@ public class KeyValueProcessorTests extends ESTestCase {
assertFalse(ingestDocument.hasField("target.third"));
}
-public void testMissingField() {
+public void testMissingField() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "&",
Processor processor = createKvProcessor("unknown", "&",
"=", null, null, "target", false);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
assertThat(exception.getMessage(), equalTo("field [unknown] not present as part of path [unknown]"));
@ -105,7 +111,7 @@ public class KeyValueProcessorTests extends ESTestCase {
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
Collections.singletonMap(fieldName, null));
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "", "", null, null, "target", true);
Processor processor = createKvProcessor(fieldName, "", "", null, null, "target", true);
processor.execute(ingestDocument);
assertIngestDocument(originalIngestDocument, ingestDocument);
}
@ -113,7 +119,7 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testNonExistentWithIgnoreMissing() throws Exception {
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "unknown", "", "", null, null, "target", true);
Processor processor = createKvProcessor("unknown", "", "", null, null, "target", true);
processor.execute(ingestDocument);
assertIngestDocument(originalIngestDocument, ingestDocument);
}
@ -121,7 +127,7 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testFailFieldSplitMatch() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello|second=world|second=universe");
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), fieldName, "&", "=", null, null, "target", false);
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello|second=world|second=universe"));
assertFalse(ingestDocument.hasField("target.second"));
@ -129,8 +135,94 @@ public class KeyValueProcessorTests extends ESTestCase {
public void testFailValueSplitMatch() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("foo", "bar"));
Processor processor = new KeyValueProcessor(randomAlphaOfLength(10), "foo", "&", "=", null, null, "target", false);
Processor processor = createKvProcessor("foo", "&", "=", null, null, "target", false);
Exception exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument));
assertThat(exception.getMessage(), equalTo("field [foo] does not contain value_split [=]"));
}
public void testTrimKeyAndValue() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first= hello &second=world& second =universe");
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, " ", " ", false, null);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
}
public void testTrimMultiCharSequence() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument,
"to=<foo@example.com>, orig_to=<bar@example.com>, %+relay=mail.example.com[private/dovecot-lmtp]," +
" delay=2.2, delays=1.9/0.01/0.01/0.21, dsn=2.0.0, status=sent "
);
Processor processor = createKvProcessor(fieldName, " ", "=", null, null, "target", false, "%+", "<>,", false, null);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.to", String.class), equalTo("foo@example.com"));
assertThat(ingestDocument.getFieldValue("target.orig_to", String.class), equalTo("bar@example.com"));
assertThat(ingestDocument.getFieldValue("target.relay", String.class), equalTo("mail.example.com[private/dovecot-lmtp]"));
assertThat(ingestDocument.getFieldValue("target.delay", String.class), equalTo("2.2"));
assertThat(ingestDocument.getFieldValue("target.delays", String.class), equalTo("1.9/0.01/0.01/0.21"));
assertThat(ingestDocument.getFieldValue("target.dsn", String.class), equalTo("2.0.0"));
assertThat(ingestDocument.getFieldValue("target.status", String.class), equalTo("sent"));
}
public void testStripBrackets() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(
random(), ingestDocument, "first=<hello>&second=\"world\"&second=(universe)&third=<foo>&fourth=[bar]&fifth='last'"
);
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, true, null);
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.first", String.class), equalTo("hello"));
assertThat(ingestDocument.getFieldValue("target.second", List.class), equalTo(Arrays.asList("world", "universe")));
assertThat(ingestDocument.getFieldValue("target.third", String.class), equalTo("foo"));
assertThat(ingestDocument.getFieldValue("target.fourth", String.class), equalTo("bar"));
assertThat(ingestDocument.getFieldValue("target.fifth", String.class), equalTo("last"));
}
public void testAddPrefix() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "first=hello&second=world&second=universe");
Processor processor = createKvProcessor(fieldName, "&", "=", null, null, "target", false, null, null, false, "arg_");
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("target.arg_first", String.class), equalTo("hello"));
assertThat(ingestDocument.getFieldValue("target.arg_second", List.class), equalTo(Arrays.asList("world", "universe")));
}
private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
Set<String> excludeKeys, String targetField,
boolean ignoreMissing) throws Exception {
return createKvProcessor(
field, fieldSplit, valueSplit, includeKeys, excludeKeys, targetField, ignoreMissing, null, null, false, null
);
}
private static KeyValueProcessor createKvProcessor(String field, String fieldSplit, String valueSplit, Set<String> includeKeys,
Set<String> excludeKeys, String targetField, boolean ignoreMissing,
String trimKey, String trimValue, boolean stripBrackets,
String prefix) throws Exception {
Map<String, Object> config = new HashMap<>();
config.put("field", field);
config.put("field_split", fieldSplit);
config.put("value_split", valueSplit);
config.put("target_field", targetField);
if (includeKeys != null) {
config.put("include_keys", new ArrayList<>(includeKeys));
}
if (excludeKeys != null) {
config.put("exclude_keys", new ArrayList<>(excludeKeys));
}
config.put("ignore_missing", ignoreMissing);
if (trimKey != null) {
config.put("trim_key", trimKey);
}
if (trimValue != null) {
config.put("trim_value", trimValue);
}
config.put("strip_brackets", stripBrackets);
if (prefix != null) {
config.put("prefix", prefix);
}
return FACTORY.create(null, randomAlphaOfLength(10), config);
}
}

View File

@ -0,0 +1,216 @@
---
teardown:
- do:
ingest.delete_pipeline:
id: "my_pipeline"
ignore: 404
---
"Test invoke bytes processor":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"lang": "painless",
"source" : "ctx.target_field = Processors.bytes(ctx.source_field)"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {source_field: "1kb"}
- do:
get:
index: test
type: test
id: 1
- match: { _source.source_field: "1kb" }
- match: { _source.target_field: 1024 }
---
"Test invoke lowercase processor":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"lang": "painless",
"source" : "ctx.target_field = Processors.lowercase(ctx.source_field)"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {source_field: "FooBar"}
- do:
get:
index: test
type: test
id: 1
- match: { _source.source_field: "FooBar" }
- match: { _source.target_field: "foobar" }
---
"Test invoke uppercase processor":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"lang": "painless",
"source" : "ctx.target_field = Processors.uppercase(ctx.source_field)"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {source_field: "FooBar"}
- do:
get:
index: test
type: test
id: 1
- match: { _source.source_field: "FooBar" }
- match: { _source.target_field: "FOOBAR" }
---
"Test invoke json processor, assign to field":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"lang": "painless",
"source" : "ctx.target_field = Processors.json(ctx.source_field)"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {source_field: "{\"foo\":\"bar\"}"}
- do:
get:
index: test
type: test
id: 1
- match: { _source.source_field: "{\"foo\":\"bar\"}" }
- match: { _source.target_field.foo: "bar" }
---
"Test invoke json processor, assign to root":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"lang": "painless",
"source" : "Processors.json(ctx, 'source_field')"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {source_field: "{\"foo\":\"bar\"}"}
- do:
get:
index: test
type: test
id: 1
- match: { _source.source_field: "{\"foo\":\"bar\"}" }
- match: { _source.foo: "bar" }
---
"Test invoke urlDecode processor":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description": "_description",
"processors": [
{
"script" : {
"lang": "painless",
"source" : "ctx.target_field = Processors.urlDecode(ctx.source_field)"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {source_field: "foo%20bar"}
- do:
get:
index: test
type: test
id: 1
- match: { _source.source_field: "foo%20bar" }
- match: { _source.target_field: "foo bar" }

View File

@ -1,7 +1,5 @@
{
"index_patterns": [
"filebeat-6.0.0-*"
],
"index_patterns": ["filebeat-6.0.0-*"],
"mappings": {
"doc": {
"_meta": {
@ -67,12 +65,14 @@
"type": "keyword"
},
"country_iso_code": {
"ignore_above": 1024,
"type": "keyword"
},
"location": {
"type": "geo_point"
},
"region_iso_code": {
"type": "keyword"
},
"region_name": {
"ignore_above": 1024,
"type": "keyword"

View File

@ -102,8 +102,8 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(queryId);
builder.field(QUALITY_LEVEL_FIELD.getPreferredName(), this.evaluationResult);
-builder.startArray(UNKNOWN_DOCS_FIELD.getPreferredName());
-for (DocumentKey key : EvaluationMetric.filterUnknownDocuments(ratedHits)) {
+builder.startArray(UNRATED_DOCS_FIELD.getPreferredName());
+for (DocumentKey key : EvaluationMetric.filterUnratedDocuments(ratedHits)) {
builder.startObject();
builder.field(RatedDocument.INDEX_FIELD.getPreferredName(), key.getIndex());
builder.field(RatedDocument.DOC_ID_FIELD.getPreferredName(), key.getDocId());
@ -123,7 +123,7 @@ public class EvalQueryQuality implements ToXContentFragment, Writeable {
}
private static final ParseField QUALITY_LEVEL_FIELD = new ParseField("quality_level");
-private static final ParseField UNKNOWN_DOCS_FIELD = new ParseField("unknown_docs");
+private static final ParseField UNRATED_DOCS_FIELD = new ParseField("unrated_docs");
private static final ParseField HITS_FIELD = new ParseField("hits");
private static final ParseField METRIC_DETAILS_FIELD = new ParseField("metric_details");
private static final ObjectParser<ParsedEvalQueryQuality, Void> PARSER = new ObjectParser<>("eval_query_quality",

View File

@ -76,10 +76,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
/**
* filter @link {@link RatedSearchHit} that don't have a rating
*/
-static List<DocumentKey> filterUnknownDocuments(List<RatedSearchHit> ratedHits) {
-List<DocumentKey> unknownDocs = ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)
+static List<DocumentKey> filterUnratedDocuments(List<RatedSearchHit> ratedHits) {
+return ratedHits.stream().filter(hit -> hit.getRating().isPresent() == false)
.map(hit -> new DocumentKey(hit.getSearchHit().getIndex(), hit.getSearchHit().getId())).collect(Collectors.toList());
-return unknownDocs;
}
/**

View File

@ -40,7 +40,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
+import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.hamcrest.CoreMatchers.containsString;
@ -128,7 +128,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, rated);
assertEquals(12.779642067948913, result.getQualityLevel(), DELTA);
-assertEquals(2, filterUnknownDocuments(result.getHitsAndRatings()).size());
+assertEquals(2, filterUnratedDocuments(result.getHitsAndRatings()).size());
/**
* Check with normalization: to get the maximal possible dcg, sort documents by
@ -185,7 +185,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
assertEquals(12.392789260714371, result.getQualityLevel(), DELTA);
-assertEquals(1, filterUnknownDocuments(result.getHitsAndRatings()).size());
+assertEquals(1, filterUnratedDocuments(result.getHitsAndRatings()).size());
/**
* Check with normalization: to get the maximal possible dcg, sort documents by
@ -224,13 +224,13 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
assertEquals(0.0d, result.getQualityLevel(), DELTA);
-assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size());
+assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());
// also check normalized
dcg = new DiscountedCumulativeGain(true, null, 10);
result = dcg.evaluate("id", hits, ratedDocs);
assertEquals(0.0d, result.getQualityLevel(), DELTA);
-assertEquals(0, filterUnknownDocuments(result.getHitsAndRatings()).size());
+assertEquals(0, filterUnratedDocuments(result.getHitsAndRatings()).size());
}
public void testParseFromXContent() throws IOException {

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
-import org.elasticsearch.index.rankeval.RatedDocument.DocumentKey;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;
@ -52,11 +51,6 @@ public class EvalQueryQualityTests extends ESTestCase {
}
public static EvalQueryQuality randomEvalQueryQuality() {
-List<DocumentKey> unknownDocs = new ArrayList<>();
-int numberOfUnknownDocs = randomInt(5);
-for (int i = 0; i < numberOfUnknownDocs; i++) {
-unknownDocs.add(new DocumentKey(randomAlphaOfLength(10), randomAlphaOfLength(10)));
-}
int numberOfSearchHits = randomInt(5);
List<RatedSearchHit> ratedHits = new ArrayList<>();
for (int i = 0; i < numberOfSearchHits; i++) {

View File

@ -40,7 +40,7 @@ import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
-import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments;
+import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnratedDocuments;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.instanceOf;
@ -120,7 +120,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
for (Entry<String, EvalQueryQuality> entry : entrySet) {
EvalQueryQuality quality = entry.getValue();
if (entry.getKey() == "amsterdam_query") {
-assertEquals(2, filterUnknownDocuments(quality.getHitsAndRatings()).size());
+assertEquals(2, filterUnratedDocuments(quality.getHitsAndRatings()).size());
List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
assertEquals(6, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {
@ -133,7 +133,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
}
}
if (entry.getKey() == "berlin_query") {
-assertEquals(5, filterUnknownDocuments(quality.getHitsAndRatings()).size());
+assertEquals(5, filterUnratedDocuments(quality.getHitsAndRatings()).size());
List<RatedSearchHit> hitsAndRatings = quality.getHitsAndRatings();
assertEquals(6, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {

View File

@ -158,7 +158,7 @@ public class RankEvalResponseTests extends ESTestCase {
" \"details\": {" +
" \"coffee_query\": {" +
" \"quality_level\": 0.1," +
" \"unknown_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
" \"unrated_docs\": [{\"_index\":\"index\",\"_id\":\"456\"}]," +
" \"hits\":[{\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"123\",\"_score\":1.0}," +
" \"rating\":5}," +
" {\"hit\":{\"_index\":\"index\",\"_type\":\"\",\"_id\":\"456\",\"_score\":1.0}," +

View File

@ -73,7 +73,7 @@ setup:
- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
-- match: { details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]}
+- match: { details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 2, "docs_retrieved": 2}}
- length: { details.amsterdam_query.hits: 3}
@ -85,7 +85,7 @@ setup:
- is_false: details.amsterdam_query.hits.2.rating
- match: { details.berlin_query.quality_level: 1.0}
-- match: { details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc4"}]}
+- match: { details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc4"}]}
- match: { details.berlin_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}
- length: { details.berlin_query.hits: 2}
- match: { details.berlin_query.hits.0.hit._id: "doc1" }
@ -155,9 +155,9 @@ setup:
- gt: {details.amsterdam_query.quality_level: 0.333}
- lt: {details.amsterdam_query.quality_level: 0.334}
- match: {details.amsterdam_query.metric_details.mean_reciprocal_rank: {"first_relevant": 3}}
-- match: {details.amsterdam_query.unknown_docs: [ {"_index": "foo", "_id": "doc2"},
+- match: {details.amsterdam_query.unrated_docs: [ {"_index": "foo", "_id": "doc2"},
{"_index": "foo", "_id": "doc3"} ]}
- match: {details.berlin_query.quality_level: 0.5}
- match: {details.berlin_query.metric_details.mean_reciprocal_rank: {"first_relevant": 2}}
-- match: {details.berlin_query.unknown_docs: [ {"_index": "foo", "_id": "doc1"}]}
+- match: {details.berlin_query.unrated_docs: [ {"_index": "foo", "_id": "doc1"}]}

View File

@ -73,7 +73,7 @@
- lt: {quality_level: 13.848264 }
- gt: {details.dcg_query.quality_level: 13.848263}
- lt: {details.dcg_query.quality_level: 13.848264}
-- match: {details.dcg_query.unknown_docs: [ ]}
+- match: {details.dcg_query.unrated_docs: [ ]}
# reverse the order in which the results are returned (less relevant docs first)
@ -100,7 +100,7 @@
- lt: {quality_level: 10.299675}
- gt: {details.dcg_query_reverse.quality_level: 10.299674}
- lt: {details.dcg_query_reverse.quality_level: 10.299675}
-- match: {details.dcg_query_reverse.unknown_docs: [ ]}
+- match: {details.dcg_query_reverse.unrated_docs: [ ]}
# if we mix both, we should get the average
@ -138,7 +138,7 @@
- lt: {quality_level: 12.073970}
- gt: {details.dcg_query.quality_level: 13.848263}
- lt: {details.dcg_query.quality_level: 13.848264}
-- match: {details.dcg_query.unknown_docs: [ ]}
+- match: {details.dcg_query.unrated_docs: [ ]}
- gt: {details.dcg_query_reverse.quality_level: 10.299674}
- lt: {details.dcg_query_reverse.quality_level: 10.299675}
-- match: {details.dcg_query_reverse.unknown_docs: [ ]}
+- match: {details.dcg_query_reverse.unrated_docs: [ ]}

View File

@ -36,7 +36,7 @@
- match: { quality_level: 1}
- match: { details.amsterdam_query.quality_level: 1.0}
-- match: { details.amsterdam_query.unknown_docs: [ ]}
+- match: { details.amsterdam_query.unrated_docs: [ ]}
- match: { details.amsterdam_query.metric_details.precision: {"relevant_docs_retrieved": 1, "docs_retrieved": 1}}
- is_true: failures.invalid_query

View File

@ -85,7 +85,7 @@ setup:
}
- match: {quality_level: 0.9}
-- match: {details.amsterdam_query.unknown_docs.0._id: "6"}
+- match: {details.amsterdam_query.unrated_docs.0._id: "6"}
---
"Test illegal request parts":

View File

@ -185,6 +185,16 @@ public final class GeoIpProcessor extends AbstractProcessor {
geoData.put("continent_name", continentName);
}
break;
case REGION_ISO_CODE:
// ISO 3166-2 code for country subdivisions.
// See iso.org/iso-3166-country-codes.html
String countryIso = country.getIsoCode();
String subdivisionIso = subdivision.getIsoCode();
if (countryIso != null && subdivisionIso != null) {
String regionIsoCode = countryIso + "-" + subdivisionIso;
geoData.put("region_iso_code", regionIsoCode);
}
break;
case REGION_NAME:
String subdivisionName = subdivision.getName();
if (subdivisionName != null) {
@ -300,8 +310,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
public static final class Factory implements Processor.Factory {
static final Set<Property> DEFAULT_CITY_PROPERTIES = EnumSet.of(
-Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME,
-Property.CITY_NAME, Property.LOCATION
+Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE,
+Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
);
static final Set<Property> DEFAULT_COUNTRY_PROPERTIES = EnumSet.of(
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE
@ -377,6 +387,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
COUNTRY_ISO_CODE,
COUNTRY_NAME,
CONTINENT_NAME,
REGION_ISO_CODE,
REGION_NAME,
CITY_NAME,
TIMEZONE,
@ -386,7 +397,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
static final EnumSet<Property> ALL_CITY_PROPERTIES = EnumSet.of(
Property.IP, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_NAME,
Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION
Property.REGION_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE,
Property.LOCATION
);
static final EnumSet<Property> ALL_COUNTRY_PROPERTIES = EnumSet.of(
Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE
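
The new REGION_ISO_CODE property joins the country and subdivision ISO codes with a dash, producing an ISO 3166-2 identifier. A standalone sketch of just that composition rule (the literal values are illustrative, matching the "US-MN" fixture in the tests below):

    Map<String, Object> geoData = new HashMap<>();
    String countryIso = "US";      // as returned by country.getIsoCode()
    String subdivisionIso = "MN";  // as returned by subdivision.getIsoCode()
    if (countryIso != null && subdivisionIso != null) {
        geoData.put("region_iso_code", countryIso + "-" + subdivisionIso); // "US-MN"
    }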

View File

@ -284,7 +284,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
config1.put("properties", Collections.singletonList("invalid"));
Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config1));
assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " +
"COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));
"COUNTRY_NAME, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));
Map<String, Object> config2 = new HashMap<>();
config2.put("field", "_field");

View File

@ -117,11 +117,12 @@ public class GeoIpProcessorTests extends ESTestCase {
assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(address));
@SuppressWarnings("unchecked")
Map<String, Object> geoData = (Map<String, Object>) ingestDocument.getSourceAndMetadata().get("target_field");
assertThat(geoData.size(), equalTo(8));
assertThat(geoData.size(), equalTo(9));
assertThat(geoData.get("ip"), equalTo(address));
assertThat(geoData.get("country_iso_code"), equalTo("US"));
assertThat(geoData.get("country_name"), equalTo("United States"));
assertThat(geoData.get("continent_name"), equalTo("North America"));
assertThat(geoData.get("region_iso_code"), equalTo("US-FL"));
assertThat(geoData.get("region_name"), equalTo("Florida"));
assertThat(geoData.get("city_name"), equalTo("Hollywood"));
assertThat(geoData.get("timezone"), equalTo("America/New_York"));

View File

@ -30,11 +30,12 @@
type: test
id: 1
- match: { _source.field1: "128.101.101.101" }
- length: { _source.geoip: 5 }
- length: { _source.geoip: 6 }
- match: { _source.geoip.city_name: "Minneapolis" }
- match: { _source.geoip.country_iso_code: "US" }
- match: { _source.geoip.location.lon: -93.2166 }
- match: { _source.geoip.location.lat: 44.9759 }
- match: { _source.geoip.region_iso_code: "US-MN" }
- match: { _source.geoip.region_name: "Minnesota" }
- match: { _source.geoip.continent_name: "North America" }
@ -54,7 +55,7 @@
{
"geoip" : {
"field" : "field1",
"properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"]
"properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_iso_code", "region_name", "continent_name"]
}
}
]
@ -75,7 +76,7 @@
type: test
id: 1
- match: { _source.field1: "128.101.101.101" }
- length: { _source.geoip: 8 }
- length: { _source.geoip: 9 }
- match: { _source.geoip.city_name: "Minneapolis" }
- match: { _source.geoip.country_iso_code: "US" }
- match: { _source.geoip.ip: "128.101.101.101" }
@ -83,6 +84,7 @@
- match: { _source.geoip.location.lat: 44.9759 }
- match: { _source.geoip.timezone: "America/Chicago" }
- match: { _source.geoip.country_name: "United States" }
- match: { _source.geoip.region_iso_code: "US-MN" }
- match: { _source.geoip.region_name: "Minnesota" }
- match: { _source.geoip.continent_name: "North America" }
@ -188,11 +190,12 @@
type: test
id: 2
- match: { _source.field1: "128.101.101.101" }
- length: { _source.geoip: 5 }
- length: { _source.geoip: 6 }
- match: { _source.geoip.city_name: "Minneapolis" }
- match: { _source.geoip.country_iso_code: "US" }
- match: { _source.geoip.location.lon: -93.2166 }
- match: { _source.geoip.location.lat: 44.9759 }
- match: { _source.geoip.region_iso_code: "US-MN" }
- match: { _source.geoip.region_name: "Minnesota" }
- match: { _source.geoip.continent_name: "North America" }

View File

@ -19,9 +19,6 @@
package org.elasticsearch.upgrades;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version;
import org.elasticsearch.client.Request;
@ -34,7 +31,6 @@ import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.NotEqualMessageBuilder;
@ -45,7 +41,6 @@ import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -142,8 +137,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
count = randomIntBetween(2000, 3000);
byte[] randomByteArray = new byte[16];
@ -164,16 +160,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
count = countOfIndexedRandomDocuments();
}
Map<String, String> params = new HashMap<>();
params.put("timeout", "2m");
params.put("wait_for_status", "green");
params.put("wait_for_no_relocating_shards", "true");
params.put("wait_for_events", "languid");
Map<String, Object> healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params));
logger.info("health api response: {}", healthRsp);
assertEquals("green", healthRsp.get("status"));
assertFalse((Boolean) healthRsp.get("timed_out"));
ensureGreenLongWait(index);
assertBasicSearchWorks(count);
assertAllSearchWorks(count);
assertBasicAggregationWorks();
@ -205,8 +192,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(numDocs, true, false, i -> {
@ -215,33 +203,26 @@ public class FullClusterRestartIT extends ESRestTestCase {
.endObject();
});
logger.info("Refreshing [{}]", index);
client().performRequest("POST", "/" + index + "/_refresh");
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
} else {
final int numReplicas = 1;
final long startTime = System.currentTimeMillis();
logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index);
String requestBody = "{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}";
Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings");
setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}");
Response response = client().performRequest(setNumberOfReplicas);
Map<String, String> params = new HashMap<>();
params.put("timeout", "2m");
params.put("wait_for_status", "green");
params.put("wait_for_no_relocating_shards", "true");
params.put("wait_for_events", "languid");
Map<String, Object> healthRsp = toMap(client().performRequest("GET", "/_cluster/health/" + index, params));
assertEquals("green", healthRsp.get("status"));
assertFalse((Boolean) healthRsp.get("timed_out"));
ensureGreenLongWait(index);
logger.debug("--> index [{}] is green, took [{}] ms", index, (System.currentTimeMillis() - startTime));
Map<String, Object> recoverRsp = toMap(client().performRequest("GET", "/" + index + "/_recovery"));
Map<String, Object> recoverRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_recovery")));
logger.debug("--> recovery status:\n{}", recoverRsp);
Set<Integer> counts = new HashSet<>();
for (String node : dataNodes(index, client())) {
Map<String, Object> responseBody = toMap(client().performRequest("GET", "/" + index + "/_search",
Collections.singletonMap("preference", "_only_nodes:" + node)));
Request search = new Request("GET", "/" + index + "/_search");
search.addParameter("preference", "_only_nodes:" + node);
Map<String, Object> responseBody = entityAsMap(client().performRequest(search));
assertNoFailures(responseBody);
int hits = (int) XContentMapValues.extractValue("hits.total", responseBody);
counts.add(hits);
@ -282,12 +263,13 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
String aliasName = "%23" + index; // %23 == #
client().performRequest("PUT", "/" + index + "/_alias/" + aliasName);
Response response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName);
client().performRequest(new Request("PUT", "/" + index + "/_alias/" + aliasName));
Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName));
assertEquals(200, response.getStatusLine().getStatusCode());
count = randomIntBetween(32, 128);
@ -301,19 +283,20 @@ public class FullClusterRestartIT extends ESRestTestCase {
count = countOfIndexedRandomDocuments();
}
logger.error("clusterState=" + toMap(client().performRequest("GET", "/_cluster/state",
Collections.singletonMap("metric", "metadata"))));
Request request = new Request("GET", "/_cluster/state");
request.addParameter("metric", "metadata");
logger.error("clusterState=" + entityAsMap(client().performRequest(request)));
// We can read from the alias just like we can read from the index.
String aliasName = "%23" + index; // %23 == #
Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + aliasName + "/_search"));
Map<String, Object> searchRsp = entityAsMap(client().performRequest(new Request("GET", "/" + aliasName + "/_search")));
int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(count, totalHits);
if (runningAgainstOldCluster == false) {
// We can remove the alias.
Response response = client().performRequest("DELETE", "/" + index + "/_alias/" + aliasName);
Response response = client().performRequest(new Request("DELETE", "/" + index + "/_alias/" + aliasName));
assertEquals(200, response.getStatusLine().getStatusCode());
// and check that it is gone:
response = client().performRequest("HEAD", "/" + index + "/_alias/" + aliasName);
response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + aliasName));
assertEquals(404, response.getStatusLine().getStatusCode());
}
}
@ -330,13 +313,14 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
client().performRequest("PUT", "/" + index);
Request createTemplate = new Request("PUT", "/_template/template_1");
createTemplate.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createTemplate);
client().performRequest(new Request("PUT", "/" + index));
}
// verify that we can still read some properties from the cluster state API:
Map<String, Object> clusterState = toMap(client().performRequest("GET", "/_cluster/state"));
Map<String, Object> clusterState = entityAsMap(client().performRequest(new Request("GET", "/_cluster/state")));
// Check some global properties:
String clusterName = (String) clusterState.get("cluster_name");
@ -381,8 +365,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> {
@ -393,23 +378,20 @@ public class FullClusterRestartIT extends ESRestTestCase {
ensureGreen(index); // wait for source index to be available on both nodes before starting shrink
String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}";
Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
client().performRequest(updateSettingsRequest);
String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}";
rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(),
new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
client().performRequest(shrinkIndexRequest);
rsp = client().performRequest("POST", "/_refresh");
assertEquals(200, rsp.getStatusLine().getStatusCode());
client().performRequest(new Request("POST", "/_refresh"));
} else {
numDocs = countOfIndexedRandomDocuments();
}
Map<?, ?> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
int totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertThat(totalShards, greaterThan(1));
@ -418,7 +400,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
int totalHits = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(numDocs, totalHits);
response = toMap(client().performRequest("GET", "/" + shrunkenIndex+ "/_search"));
response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex+ "/_search")));
assertNoFailures(response);
totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertEquals(1, totalShards);
@ -448,8 +430,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> {
@ -460,23 +443,20 @@ public class FullClusterRestartIT extends ESRestTestCase {
} else {
ensureGreen(index); // wait for source index to be available on both nodes before starting shrink
String updateSettingsRequestBody = "{\"settings\": {\"index.blocks.write\": true}}";
Response rsp = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(updateSettingsRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request updateSettingsRequest = new Request("PUT", "/" + index + "/_settings");
updateSettingsRequest.setJsonEntity("{\"settings\": {\"index.blocks.write\": true}}");
client().performRequest(updateSettingsRequest);
String shrinkIndexRequestBody = "{\"settings\": {\"index.number_of_shards\": 1}}";
rsp = client().performRequest("PUT", "/" + index + "/_shrink/" + shrunkenIndex, Collections.emptyMap(),
new StringEntity(shrinkIndexRequestBody, ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode());
Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
client().performRequest(shrinkIndexRequest);
numDocs = countOfIndexedRandomDocuments();
}
Response rsp = client().performRequest("POST", "/_refresh");
assertEquals(200, rsp.getStatusLine().getStatusCode());
client().performRequest(new Request("POST", "/_refresh"));
Map<?, ?> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
int totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertThat(totalShards, greaterThan(1));
@ -486,7 +466,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
assertEquals(numDocs, totalHits);
if (runningAgainstOldCluster == false) {
response = toMap(client().performRequest("GET", "/" + shrunkenIndex + "/_search"));
response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex + "/_search")));
assertNoFailures(response);
totalShards = (int) XContentMapValues.extractValue("_shards.total", response);
assertEquals(1, totalShards);
@ -499,43 +479,48 @@ public class FullClusterRestartIT extends ESRestTestCase {
void assertBasicSearchWorks(int count) throws IOException {
logger.info("--> testing basic search");
Map<String, Object> response = toMap(client().performRequest("GET", "/" + index + "/_search"));
{
Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
int numDocs = (int) XContentMapValues.extractValue("hits.total", response);
logger.info("Found {} in old index", numDocs);
assertEquals(count, numDocs);
}
logger.info("--> testing basic search with sort");
String searchRequestBody = "{ \"sort\": [{ \"int\" : \"asc\" }]}";
response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
{
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"sort\": [{ \"int\" : \"asc\" }]}");
Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(response);
numDocs = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(count, numDocs);
assertTotalHits(count, response);
}
logger.info("--> testing exists filter");
searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"string\"} }}";
response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
{
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"string\"} }}");
Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(response);
numDocs = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(count, numDocs);
assertTotalHits(count, response);
}
searchRequestBody = "{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}";
response = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(searchRequestBody, ContentType.APPLICATION_JSON)));
logger.info("--> testing field with dots in the name");
{
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"query\": { \"exists\" : {\"field\": \"field.with.dots\"} }}");
Map<String, Object> response = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(response);
numDocs = (int) XContentMapValues.extractValue("hits.total", response);
assertEquals(count, numDocs);
assertTotalHits(count, response);
}
}
void assertAllSearchWorks(int count) throws IOException {
logger.info("--> testing _all search");
Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search"));
assertNoFailures(searchRsp);
int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(count, totalHits);
Map<?, ?> bestHit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0);
Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
assertNoFailures(response);
assertTotalHits(count, response);
Map<?, ?> bestHit = (Map<?, ?>) ((List<?>) (XContentMapValues.extractValue("hits.hits", response))).get(0);
// Make sure there are payloads and they are taken into account for the score
// the 'string' field has a boost of 4 in the mappings so it should get a payload boost
@ -543,82 +528,77 @@ public class FullClusterRestartIT extends ESRestTestCase {
assertNotNull(stringValue);
String type = (String) bestHit.get("_type");
String id = (String) bestHit.get("_id");
String requestBody = "{ \"query\": { \"match_all\" : {} }}";
String explanation = toStr(client().performRequest("GET", "/" + index + "/" + type + "/" + id,
Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
Request explanationRequest = new Request("GET", "/" + index + "/" + type + "/" + id + "/_explain");
explanationRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
String explanation = toStr(client().performRequest(explanationRequest));
assertFalse("Could not find payload boost in explanation\n" + explanation, explanation.contains("payloadBoost"));
// Make sure the query can run on the whole index
searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search",
Collections.singletonMap("explain", "true"), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(searchRsp);
totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(count, totalHits);
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setEntity(explanationRequest.getEntity());
searchRequest.addParameter("explain", "true");
Map<?, ?> matchAllResponse = entityAsMap(client().performRequest(searchRequest));
assertNoFailures(matchAllResponse);
assertTotalHits(count, matchAllResponse);
}
void assertBasicAggregationWorks() throws IOException {
// histogram on a long
String requestBody = "{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}";
Map<?, ?> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
assertNoFailures(searchRsp);
List<?> histoBuckets = (List<?>) XContentMapValues.extractValue("aggregations.histo.buckets", searchRsp);
long totalCount = 0;
Request longHistogramRequest = new Request("GET", "/" + index + "/_search");
longHistogramRequest.setJsonEntity("{ \"aggs\": { \"histo\" : {\"histogram\" : {\"field\": \"int\", \"interval\": 10}} }}");
Map<?, ?> longHistogram = entityAsMap(client().performRequest(longHistogramRequest));
assertNoFailures(longHistogram);
List<?> histoBuckets = (List<?>) XContentMapValues.extractValue("aggregations.histo.buckets", longHistogram);
int histoCount = 0;
for (Object entry : histoBuckets) {
Map<?, ?> bucket = (Map<?, ?>) entry;
totalCount += (Integer) bucket.get("doc_count");
histoCount += (Integer) bucket.get("doc_count");
}
int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(totalHits, totalCount);
assertTotalHits(histoCount, longHistogram);
// terms on a boolean
requestBody = "{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}";
searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
List<?> termsBuckets = (List<?>) XContentMapValues.extractValue("aggregations.bool_terms.buckets", searchRsp);
totalCount = 0;
Request boolTermsRequest = new Request("GET", "/" + index + "/_search");
boolTermsRequest.setJsonEntity("{ \"aggs\": { \"bool_terms\" : {\"terms\" : {\"field\": \"bool\"}} }}");
Map<?, ?> boolTerms = entityAsMap(client().performRequest(boolTermsRequest));
List<?> termsBuckets = (List<?>) XContentMapValues.extractValue("aggregations.bool_terms.buckets", boolTerms);
int termsCount = 0;
for (Object entry : termsBuckets) {
Map<?, ?> bucket = (Map<?, ?>) entry;
totalCount += (Integer) bucket.get("doc_count");
termsCount += (Integer) bucket.get("doc_count");
}
totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp);
assertEquals(totalHits, totalCount);
assertTotalHits(termsCount, boolTerms);
}
void assertRealtimeGetWorks() throws IOException {
String requestBody = "{ \"index\": { \"refresh_interval\" : -1 }}";
Response response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request disableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
disableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : -1 }}");
client().performRequest(disableAutoRefresh);
requestBody = "{ \"query\": { \"match_all\" : {} }}";
Map<String, Object> searchRsp = toMap(client().performRequest("GET", "/" + index + "/_search", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchRsp))).get(0);
Request searchRequest = new Request("GET", "/" + index + "/_search");
searchRequest.setJsonEntity("{ \"query\": { \"match_all\" : {} }}");
Map<?, ?> searchResponse = entityAsMap(client().performRequest(searchRequest));
Map<?, ?> hit = (Map<?, ?>) ((List<?>)(XContentMapValues.extractValue("hits.hits", searchResponse))).get(0);
String docId = (String) hit.get("_id");
requestBody = "{ \"doc\" : { \"foo\": \"bar\"}}";
response = client().performRequest("POST", "/" + index + "/doc/" + docId + "/_update", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request updateRequest = new Request("POST", "/" + index + "/doc/" + docId + "/_update");
updateRequest.setJsonEntity("{ \"doc\" : { \"foo\": \"bar\"}}");
client().performRequest(updateRequest);
Map<String, Object> getRsp = toMap(client().performRequest("GET", "/" + index + "/doc/" + docId));
Map<String, Object> getRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/doc/" + docId)));
Map<?, ?> source = (Map<?, ?>) getRsp.get("_source");
assertTrue("doc does not contain 'foo' key: " + source, source.containsKey("foo"));
requestBody = "{ \"index\": { \"refresh_interval\" : \"1s\" }}";
response = client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
new StringEntity(requestBody, ContentType.APPLICATION_JSON));
assertEquals(200, response.getStatusLine().getStatusCode());
Request enableAutoRefresh = new Request("PUT", "/" + index + "/_settings");
enableAutoRefresh.setJsonEntity("{ \"index\": { \"refresh_interval\" : \"1s\" }}");
client().performRequest(enableAutoRefresh);
}
void assertStoredBinaryFields(int count) throws Exception {
String requestBody = "{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}";
Map<String, Object> rsp = toMap(client().performRequest("GET", "/" + index + "/_search",
Collections.emptyMap(), new StringEntity(requestBody, ContentType.APPLICATION_JSON)));
Request request = new Request("GET", "/" + index + "/_search");
request.setJsonEntity("{ \"query\": { \"match_all\" : {} }, \"size\": 100, \"stored_fields\": \"binary\"}");
Map<String, Object> rsp = entityAsMap(client().performRequest(request));
int totalCount = (Integer) XContentMapValues.extractValue("hits.total", rsp);
assertEquals(count, totalCount);
assertTotalHits(count, rsp);
List<?> hits = (List<?>) XContentMapValues.extractValue("hits.hits", rsp);
assertEquals(100, hits.size());
for (Object hit : hits) {
@ -631,14 +611,6 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
}
static Map<String, Object> toMap(Response response) throws IOException {
return toMap(EntityUtils.toString(response.getEntity()));
}
static Map<String, Object> toMap(String response) throws IOException {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
}
static String toStr(Response response) throws IOException {
return EntityUtils.toString(response.getEntity());
}
@ -648,6 +620,11 @@ public class FullClusterRestartIT extends ESRestTestCase {
assertEquals(0, failed);
}
static void assertTotalHits(int expectedTotalHits, Map<?, ?> response) {
int actualTotalHits = (Integer) XContentMapValues.extractValue("hits.total", response);
assertEquals(expectedTotalHits, actualTotalHits);
}
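// Usage sketch for the new helper (endpoint and count are illustrative): it replaces
// the repeated extractValue/assertEquals pairs removed throughout this file, e.g.
//   Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
//   assertTotalHits(count, response);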
/**
* Tests that a single document survives. Super basic smoke test.
*/
@ -656,11 +633,12 @@ public class FullClusterRestartIT extends ESRestTestCase {
String doc = "{\"test\": \"test\"}";
if (runningAgainstOldCluster) {
client().performRequest("PUT", docLocation, singletonMap("refresh", "true"),
new StringEntity(doc, ContentType.APPLICATION_JSON));
Request createDoc = new Request("PUT", docLocation);
createDoc.setJsonEntity(doc);
client().performRequest(createDoc);
}
assertThat(toStr(client().performRequest("GET", docLocation)), containsString(doc));
assertThat(toStr(client().performRequest(new Request("GET", docLocation))), containsString(doc));
}
/**
@ -733,16 +711,18 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
// Count the documents in the index to make sure we have as many as we put there
String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0")));
Request countRequest = new Request("GET", "/" + index + "/_search");
countRequest.addParameter("size", "0");
String countResponse = toStr(client().performRequest(countRequest));
assertThat(countResponse, containsString("\"total\":" + count));
if (false == runningAgainstOldCluster) {
boolean restoredFromTranslog = false;
boolean foundPrimary = false;
Map<String, String> params = new HashMap<>();
params.put("h", "index,shard,type,stage,translog_ops_recovered");
params.put("s", "index,shard,type");
String recoveryResponse = toStr(client().performRequest("GET", "/_cat/recovery/" + index, params));
Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index);
recoveryRequest.addParameter("h", "index,shard,type,stage,translog_ops_recovered");
recoveryRequest.addParameter("s", "index,shard,type");
String recoveryResponse = toStr(client().performRequest(recoveryRequest));
for (String line : recoveryResponse.split("\n")) {
// Find the primaries
foundPrimary = true;
@ -768,11 +748,10 @@ public class FullClusterRestartIT extends ESRestTestCase {
if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) {
int numCurrentVersion = 0;
int numBwcVersion = 0;
params.clear();
params.put("h", "prirep,shard,index,version");
params.put("s", "prirep,shard,index");
String segmentsResponse = toStr(
client().performRequest("GET", "/_cat/segments/" + index, params));
Request segmentsRequest = new Request("GET", "/_cat/segments/" + index);
segmentsRequest.addParameter("h", "prirep,shard,index,version");
segmentsRequest.addParameter("s", "prirep,shard,index");
String segmentsResponse = toStr(client().performRequest(segmentsRequest));
for (String line : segmentsResponse.split("\n")) {
if (false == line.startsWith("p")) {
continue;
@ -817,14 +796,16 @@ public class FullClusterRestartIT extends ESRestTestCase {
refresh();
// Count the documents in the index to make sure we have as many as we put there
String countResponse = toStr(client().performRequest("GET", "/" + index + "/_search", singletonMap("size", "0")));
Request countRequest = new Request("GET", "/" + index + "/_search");
countRequest.addParameter("size", "0");
String countResponse = toStr(client().performRequest(countRequest));
assertThat(countResponse, containsString("\"total\":" + count));
// Stick a routing attribute into the cluster settings so we can see it after the restore
HttpEntity routingSetting = new StringEntity(
"{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}",
ContentType.APPLICATION_JSON);
client().performRequest("PUT", "/_cluster/settings", emptyMap(), routingSetting);
Request addRoutingSettings = new Request("PUT", "/_cluster/settings");
addRoutingSettings.setJsonEntity(
"{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}");
client().performRequest(addRoutingSettings);
// Stick a template into the cluster so we can see it after the restore
XContentBuilder templateBuilder = JsonXContent.contentBuilder().startObject();
@ -857,8 +838,9 @@ public class FullClusterRestartIT extends ESRestTestCase {
templateBuilder.endObject();
}
templateBuilder.endObject().endObject();
client().performRequest("PUT", "/_template/test_template", emptyMap(),
new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON));
Request createTemplateRequest = new Request("PUT", "/_template/test_template");
createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder));
client().performRequest(createTemplateRequest);
if (runningAgainstOldCluster) {
// Create the repo
@ -871,13 +853,15 @@ public class FullClusterRestartIT extends ESRestTestCase {
repoConfig.endObject();
}
repoConfig.endObject();
client().performRequest("PUT", "/_snapshot/repo", emptyMap(),
new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON));
Request createRepoRequest = new Request("PUT", "/_snapshot/repo");
createRepoRequest.setJsonEntity(Strings.toString(repoConfig));
client().performRequest(createRepoRequest);
}
client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"),
singletonMap("wait_for_completion", "true"),
new StringEntity("{\"indices\": \"" + index + "\"}", ContentType.APPLICATION_JSON));
Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"));
createSnapshot.addParameter("wait_for_completion", "true");
createSnapshot.setJsonEntity("{\"indices\": \"" + index + "\"}");
client().performRequest(createSnapshot);
checkSnapshot("old_snap", count, oldClusterVersion);
if (false == runningAgainstOldCluster) {
@ -896,10 +880,13 @@ public class FullClusterRestartIT extends ESRestTestCase {
mappingsAndSettings.endObject();
}
mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
Request createIndex = new Request("PUT", "/" + index);
createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
client().performRequest(createIndex);
} else {
Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
Request statsRequest = new Request("GET", index + "/_stats");
statsRequest.addParameter("level", "shards");
Response response = client().performRequest(statsRequest);
List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
String globalHistoryUUID = null;
for (Object shard : shardStats) {
@ -920,18 +907,20 @@ public class FullClusterRestartIT extends ESRestTestCase {
private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException {
// Check the snapshot metadata, especially the version
String response = toStr(client().performRequest("GET", "/_snapshot/repo/" + snapshotName, listSnapshotVerboseParams()));
Map<String, Object> map = toMap(response);
assertEquals(response, singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", map));
assertEquals(response, singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", map));
assertEquals(response, singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", map));
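// Clusters before 5.5.0 do not understand the "verbose" parameter (see the removed
// listSnapshotVerboseParams helper below), so it is only sent to newer clusters: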
Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName);
if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) {
listSnapshotRequest.addParameter("verbose", "true");
}
Map<String, Object> listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest));
assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse));
assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse));
assertEquals(singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", listSnapshotResponse));
// Remove the routing setting and template so we can test restoring them.
HttpEntity clearRoutingSetting = new StringEntity(
"{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}",
ContentType.APPLICATION_JSON);
client().performRequest("PUT", "/_cluster/settings", emptyMap(), clearRoutingSetting);
client().performRequest("DELETE", "/_template/test_template", emptyMap(), clearRoutingSetting);
Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings");
clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}");
client().performRequest(clearRoutingFromSettings);
client().performRequest(new Request("DELETE", "/_template/test_template"));
// Restore
XContentBuilder restoreCommand = JsonXContent.contentBuilder().startObject();
@ -940,11 +929,15 @@ public class FullClusterRestartIT extends ESRestTestCase {
restoreCommand.field("rename_pattern", index);
restoreCommand.field("rename_replacement", "restored_" + index);
restoreCommand.endObject();
client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"),
new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON));
Request restoreRequest = new Request("POST", "/_snapshot/repo/" + snapshotName + "/_restore");
restoreRequest.addParameter("wait_for_completion", "true");
restoreRequest.setJsonEntity(Strings.toString(restoreCommand));
client().performRequest(restoreRequest);
// Make sure search finds all documents
String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
Request countRequest = new Request("GET", "/restored_" + index + "/_search");
countRequest.addParameter("size", "0");
String countResponse = toStr(client().performRequest(countRequest));
assertThat(countResponse, containsString("\"total\":" + count));
// Add some extra documents to the index to be sure we can still write to it after restoring it
@ -954,61 +947,56 @@ public class FullClusterRestartIT extends ESRestTestCase {
bulk.append("{\"index\":{\"_id\":\"").append(count + i).append("\"}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
client().performRequest("POST", "/restored_" + index + "/doc/_bulk", singletonMap("refresh", "true"),
new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk");
writeToRestoredRequest.addParameter("refresh", "true");
writeToRestoredRequest.setJsonEntity(bulk.toString());
client().performRequest(writeToRestoredRequest);
// And count to make sure the add worked
countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
assertThat(countResponse, containsString("\"total\":" + (count + extras)));
Request countAfterWriteRequest = new Request("GET", "/restored_" + index + "/_search");
countAfterWriteRequest.addParameter("size", "0");
String countAfterWriteResponse = toStr(client().performRequest(countAfterWriteRequest));
assertThat(countAfterWriteResponse, containsString("\"total\":" + (count + extras)));
// Clean up the index for the next iteration
client().performRequest("DELETE", "/restored_*");
client().performRequest(new Request("DELETE", "/restored_*"));
// Check settings added by the restore process
map = toMap(client().performRequest("GET", "/_cluster/settings", singletonMap("flat_settings", "true")));
Map<String, Object> expected = new HashMap<>();
expected.put("transient", emptyMap());
expected.put("persistent", singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString()));
if (expected.equals(map) == false) {
Request clusterSettingsRequest = new Request("GET", "/_cluster/settings");
clusterSettingsRequest.addParameter("flat_settings", "true");
Map<String, Object> clusterSettingsResponse = entityAsMap(client().performRequest(clusterSettingsRequest));
Map<String, Object> expectedClusterSettings = new HashMap<>();
expectedClusterSettings.put("transient", emptyMap());
expectedClusterSettings.put("persistent",
singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString()));
if (expectedClusterSettings.equals(clusterSettingsResponse) == false) {
NotEqualMessageBuilder builder = new NotEqualMessageBuilder();
builder.compareMaps(map, expected);
builder.compareMaps(clusterSettingsResponse, expectedClusterSettings);
fail("settings don't match:\n" + builder.toString());
}
// Check that the template was restored successfully
map = toMap(client().performRequest("GET", "/_template/test_template"));
expected = new HashMap<>();
Map<String, Object> getTemplateResponse = entityAsMap(client().performRequest(new Request("GET", "/_template/test_template")));
Map<String, Object> expectedTemplate = new HashMap<>();
if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_0_0_beta1)) {
expected.put("template", "evil_*");
expectedTemplate.put("template", "evil_*");
} else {
expected.put("index_patterns", singletonList("evil_*"));
expectedTemplate.put("index_patterns", singletonList("evil_*"));
}
expected.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
expected.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
expected.put("order", 0);
expectedTemplate.put("settings", singletonMap("index", singletonMap("number_of_shards", "1")));
expectedTemplate.put("mappings", singletonMap("doc", singletonMap("_source", singletonMap("enabled", true))));
expectedTemplate.put("order", 0);
Map<String, Object> aliases = new HashMap<>();
aliases.put("alias1", emptyMap());
aliases.put("alias2", singletonMap("filter", singletonMap("term", singletonMap("version", tookOnVersion.toString()))));
expected.put("aliases", aliases);
expected = singletonMap("test_template", expected);
if (false == expected.equals(map)) {
expectedTemplate.put("aliases", aliases);
expectedTemplate = singletonMap("test_template", expectedTemplate);
if (false == expectedTemplate.equals(getTemplateResponse)) {
NotEqualMessageBuilder builder = new NotEqualMessageBuilder();
builder.compareMaps(map, expected);
builder.compareMaps(getTemplateResponse, expectedTemplate);
fail("template doesn't match:\n" + builder.toString());
}
}
/**
* Parameters required to get the version of Elasticsearch that took the snapshot.
* On versions after 5.5 we need a {@code verbose} parameter.
*/
private Map<String, String> listSnapshotVerboseParams() {
if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0)) {
return emptyMap();
}
return singletonMap("verbose", "true");
}
// TODO tests for upgrades after shrink. We've had trouble with shrink in the past.
@ -1018,14 +1006,15 @@ public class FullClusterRestartIT extends ESRestTestCase {
logger.info("Indexing {} random documents", count);
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON));
Request createDocument = new Request("POST", "/" + index + "/doc/" + i);
createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
client().performRequest(createDocument);
if (rarely()) {
refresh();
}
if (flushAllowed && rarely()) {
logger.debug("Flushing [{}]", index);
client().performRequest("POST", "/" + index + "/_flush");
client().performRequest(new Request("POST", "/" + index + "/_flush"));
}
}
if (saveInfo) {
@ -1042,13 +1031,16 @@ public class FullClusterRestartIT extends ESRestTestCase {
infoDoc.field("value", value);
infoDoc.endObject();
// Only create the document on the first run (op_type=create) so we know how many documents were created when the index was first created
Map<String, String> params = singletonMap("op_type", "create");
client().performRequest("PUT", "/info/doc/" + index + "_" + type, params,
new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON));
Request request = new Request("PUT", "/info/doc/" + index + "_" + type);
request.addParameter("op_type", "create");
request.setJsonEntity(Strings.toString(infoDoc));
client().performRequest(request);
}
private String loadInfoDocument(String type) throws IOException {
String doc = toStr(client().performRequest("GET", "/info/doc/" + index + "_" + type, singletonMap("filter_path", "_source")));
Request request = new Request("GET", "/info/doc/" + index + "_" + type);
request.addParameter("filter_path", "_source");
String doc = toStr(client().performRequest(request));
Matcher m = Pattern.compile("\"value\":\"(.+)\"").matcher(doc);
assertTrue(doc, m.find());
return m.group(1);
@ -1060,11 +1052,13 @@ public class FullClusterRestartIT extends ESRestTestCase {
private void refresh() throws IOException {
logger.debug("Refreshing [{}]", index);
client().performRequest("POST", "/" + index + "/_refresh");
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
}
private List<String> dataNodes(String index, RestClient client) throws IOException {
Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
Request request = new Request("GET", index + "/_stats");
request.addParameter("level", "shards");
Response response = client.performRequest(request);
List<String> nodes = new ArrayList<>();
List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
for (Object shard : shardStats) {
@ -1073,4 +1067,21 @@ public class FullClusterRestartIT extends ESRestTestCase {
}
return nodes;
}
/**
* Wait for an index to have green health, waiting longer than
* {@link ESRestTestCase#ensureGreen}.
*/
protected void ensureGreenLongWait(String index) throws IOException {
Request request = new Request("GET", "/_cluster/health/" + index);
request.addParameter("timeout", "2m");
request.addParameter("wait_for_status", "green");
request.addParameter("wait_for_no_relocating_shards", "true");
request.addParameter("wait_for_events", "languid");
request.addParameter("level", "shards");
Map<String, Object> healthRsp = entityAsMap(client().performRequest(request));
logger.info("health api response: {}", healthRsp);
assertEquals("green", healthRsp.get("status"));
assertFalse((Boolean) healthRsp.get("timed_out"));
}
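// Usage sketch: a plain ensureGreenLongWait(index) call replaces the hand-rolled
// health checks removed earlier in this file. "wait_for_events=languid" waits for
// even the lowest-priority pending cluster tasks to drain, so a green status
// reported here is much less likely to be transient.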
}
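
Every conversion in this file follows the same shape: the deprecated multi-argument performRequest(method, endpoint, params, entity) overloads give way to an explicit Request object that carries its own parameters and JSON body. A minimal side-by-side sketch (the endpoint, parameter, and body are illustrative):

    // old style (deprecated):
    // client().performRequest("PUT", "/" + index + "/_settings", Collections.emptyMap(),
    //         new StringEntity("{\"index\": {\"refresh_interval\": \"1s\"}}", ContentType.APPLICATION_JSON));

    // new style:
    Request request = new Request("PUT", "/" + index + "/_settings");
    request.addParameter("timeout", "30s");                               // query params move to addParameter
    request.setJsonEntity("{\"index\": {\"refresh_interval\": \"1s\"}}"); // entities move to setJsonEntity
    Response response = client().performRequest(request);

The new style also explains why the assertEquals(200, ...) checks disappear in the hunks above: performRequest throws for non-2xx responses, so asserting on the status code was redundant.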

View File

@ -18,8 +18,6 @@
*/
package org.elasticsearch.upgrades;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.Version;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Request;
@ -32,14 +30,12 @@ import org.elasticsearch.test.rest.yaml.ObjectPath;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.function.Predicate;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING;
import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
@ -65,8 +61,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
createIndex(index, settings.build());
} else if (CLUSTER_TYPE == ClusterType.UPGRADED) {
ensureGreen(index);
Response response = client().performRequest("GET", index + "/_stats", Collections.singletonMap("level", "shards"));
assertOK(response);
Request shardStatsRequest = new Request("GET", index + "/_stats");
shardStatsRequest.addParameter("level", "shards");
Response response = client().performRequest(shardStatsRequest);
ObjectPath objectPath = ObjectPath.createFromResponse(response);
List<Object> shardStats = objectPath.evaluate("indices." + index + ".shards.0");
assertThat(shardStats, hasSize(2));
@ -87,8 +84,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
private int indexDocs(String index, final int idStart, final int numDocs) throws IOException {
for (int i = 0; i < numDocs; i++) {
final int id = idStart + i;
assertOK(client().performRequest("PUT", index + "/test/" + id, emptyMap(),
new StringEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}", ContentType.APPLICATION_JSON)));
Request indexDoc = new Request("PUT", index + "/test/" + id);
indexDoc.setJsonEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}");
client().performRequest(indexDoc);
}
return numDocs;
}
@ -113,7 +111,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
public void testRecoveryWithConcurrentIndexing() throws Exception {
final String index = "recovery_with_concurrent_indexing";
Response response = client().performRequest("GET", "_nodes");
Response response = client().performRequest(new Request("GET", "_nodes"));
ObjectPath objectPath = ObjectPath.createFromResponse(response);
final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
List<String> nodes = new ArrayList<>(nodeMap.keySet());
@ -139,7 +137,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
asyncIndexDocs(index, 10, 50).get();
ensureGreen(index);
assertOK(client().performRequest("POST", index + "/_refresh"));
client().performRequest(new Request("POST", index + "/_refresh"));
assertCount(index, "_only_nodes:" + nodes.get(0), 60);
assertCount(index, "_only_nodes:" + nodes.get(1), 60);
assertCount(index, "_only_nodes:" + nodes.get(2), 60);
@ -150,7 +148,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
asyncIndexDocs(index, 60, 50).get();
ensureGreen(index);
assertOK(client().performRequest("POST", index + "/_refresh"));
client().performRequest(new Request("POST", index + "/_refresh"));
assertCount(index, "_only_nodes:" + nodes.get(0), 110);
assertCount(index, "_only_nodes:" + nodes.get(1), 110);
assertCount(index, "_only_nodes:" + nodes.get(2), 110);
@ -161,15 +159,16 @@ public class RecoveryIT extends AbstractRollingTestCase {
}
private void assertCount(final String index, final String preference, final int expectedCount) throws IOException {
final Response response = client().performRequest("GET", index + "/_count", Collections.singletonMap("preference", preference));
assertOK(response);
final Request request = new Request("GET", index + "/_count");
request.addParameter("preference", preference);
final Response response = client().performRequest(request);
final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString());
assertThat(actualCount, equalTo(expectedCount));
}
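// Usage sketch (node ids come from the _nodes call above): pinning the preference
// to a single node checks each shard copy independently after recovery, e.g.
//   assertCount(index, "_only_nodes:" + nodes.get(0), 60);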
private String getNodeId(Predicate<Version> versionPredicate) throws IOException {
Response response = client().performRequest("GET", "_nodes");
Response response = client().performRequest(new Request("GET", "_nodes"));
ObjectPath objectPath = ObjectPath.createFromResponse(response);
Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
for (String id : nodesAsMap.keySet()) {
@ -216,7 +215,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
updateIndexSettings(index, Settings.builder().put("index.routing.allocation.include._id", newNode));
asyncIndexDocs(index, 10, 50).get();
ensureGreen(index);
assertOK(client().performRequest("POST", index + "/_refresh"));
client().performRequest(new Request("POST", index + "/_refresh"));
assertCount(index, "_only_nodes:" + newNode, 60);
break;
case UPGRADED:
@ -226,8 +225,8 @@ public class RecoveryIT extends AbstractRollingTestCase {
);
asyncIndexDocs(index, 60, 50).get();
ensureGreen(index);
assertOK(client().performRequest("POST", index + "/_refresh"));
Response response = client().performRequest("GET", "_nodes");
client().performRequest(new Request("POST", index + "/_refresh"));
Response response = client().performRequest(new Request("GET", "_nodes"));
ObjectPath objectPath = ObjectPath.createFromResponse(response);
final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
List<String> nodes = new ArrayList<>(nodeMap.keySet());

View File

@ -57,6 +57,7 @@ import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isEmptyString;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import static org.junit.Assume.assumeTrue;
@ -302,5 +303,26 @@ public abstract class ArchiveTestCase extends PackagingTestCase {
}
}
public void test90SecurityCliPackaging() {
assumeThat(installation, is(notNullValue()));
final Installation.Executables bin = installation.executables();
final Shell sh = new Shell();
if (distribution().equals(Distribution.DEFAULT_TAR) || distribution().equals(Distribution.DEFAULT_ZIP)) {
assertTrue(Files.exists(installation.lib.resolve("tools").resolve("security-cli")));
Platforms.onLinux(() -> {
final Result result = sh.run(bin.elasticsearchCertutil + " help");
assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));
});
Platforms.onWindows(() -> {
final Result result = sh.run(bin.elasticsearchCertutil + " help");
assertThat(result.stdout, containsString("Simplifies certificate creation for use with the Elastic Stack"));
});
} else if (distribution().equals(Distribution.OSS_TAR) || distribution().equals(Distribution.OSS_ZIP)) {
assertFalse(Files.exists(installation.lib.resolve("tools").resolve("security-cli")));
}
}
}

View File

@ -101,6 +101,7 @@ public class Installation {
public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin");
public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore");
public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog");
public final Path elasticsearchCertutil = platformExecutable("elasticsearch-certutil");
private Path platformExecutable(String name) {
final String platformExecutableName = Platforms.WINDOWS

View File

@ -106,7 +106,7 @@ dependencies {
compile 'com.carrotsearch:hppc:0.7.1'
// time handling, remove with java 8 time
compile 'joda-time:joda-time:2.9.9'
compile 'joda-time:joda-time:2.10'
// percentiles aggregation
compile 'com.tdunning:t-digest:3.2'

View File

@ -0,0 +1 @@
f66c8125d1057ffce6c4e29e624cac863e110e2b

View File

@ -1 +0,0 @@
f7b520c458572890807d143670c9b24f4de90897

View File

@ -32,6 +32,11 @@ public interface AliasesRequest extends IndicesRequest.Replaceable {
*/
String[] aliases();
/**
* Returns the aliases as they were originally requested, before any potential name resolution
*/
String[] getOriginalAliases();
/**
* Replaces current aliases with the provided aliases.
*

View File

@ -214,6 +214,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
private final AliasActions.Type type;
private String[] indices;
private String[] aliases = Strings.EMPTY_ARRAY;
private String[] originalAliases = Strings.EMPTY_ARRAY;
private String filter;
private String routing;
private String indexRouting;
@ -238,6 +239,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
writeIndex = in.readOptionalBoolean();
}
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
originalAliases = in.readStringArray();
}
}
@Override
@ -252,6 +256,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
out.writeOptionalBoolean(writeIndex);
}
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
out.writeStringArray(originalAliases);
}
}
/**
@ -315,6 +322,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
}
}
this.aliases = aliases;
this.originalAliases = aliases;
return this;
}
@ -329,6 +337,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
throw new IllegalArgumentException("[alias] can't be empty string");
}
this.aliases = new String[] {alias};
this.originalAliases = aliases;
return this;
}
@ -432,6 +441,11 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
this.aliases = aliases;
}
@Override
public String[] getOriginalAliases() {
return originalAliases;
}
@Override
public boolean expandAliasesWildcards() {
// remove operations support wildcards among aliases; add operations don't
@ -579,7 +593,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
}, AliasActions.PARSER, new ParseField("actions"));
}
public static IndicesAliasesRequest fromXContent(XContentParser parser) throws IOException {
public static IndicesAliasesRequest fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
}
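Keeping both arrays lets security code rewrite the requested aliases without losing the user's input: replaceAliases (overridden above) updates only the resolved aliases, while the aliases(...)/alias(...) setters record both. A hedged sketch of the resulting behavior, using names from this diff:
AliasActions action = AliasActions.remove().index("logs").alias("alias*");
action.replaceAliases("alias1");     // e.g. security narrows the wildcard
action.aliases();                    // ["alias1"]  - resolved view
action.getOriginalAliases();         // ["alias*"]  - as originally requested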

View File

@ -95,7 +95,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
Set<String> aliases = new HashSet<>();
for (AliasActions action : actions) {
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices());
Collections.addAll(aliases, action.aliases());
Collections.addAll(aliases, action.getOriginalAliases());
for (String index : concreteIndices) {
switch (action.actionType()) {
case ADD:
@ -142,7 +142,7 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
if (action.expandAliasesWildcards()) {
//for DELETE we expand the aliases
String[] indexAsArray = {concreteIndex};
ImmutableOpenMap<String, List<AliasMetaData>> aliasMetaData = metaData.findAliases(action.aliases(), indexAsArray);
ImmutableOpenMap<String, List<AliasMetaData>> aliasMetaData = metaData.findAliases(action, indexAsArray);
List<String> finalAliases = new ArrayList<>();
for (ObjectCursor<List<AliasMetaData>> curAliases : aliasMetaData.values()) {
for (AliasMetaData aliasMeta: curAliases.value) {

View File

@ -63,7 +63,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
@Override
protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) {
String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request.aliases(), concreteIndices);
ImmutableOpenMap<String, List<AliasMetaData>> aliases = state.metaData().findAliases(request, concreteIndices);
listener.onResponse(new GetAliasesResponse(postProcess(request, concreteIndices, aliases)));
}

View File

@ -32,15 +32,14 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.common.settings.IndexScopedSettings;
import java.io.IOException;
import java.util.List;
@ -110,7 +109,7 @@ public class TransportGetIndexAction extends TransportClusterInfoAction<GetIndex
break;
case ALIASES:
if (!doneAliases) {
aliasesResult = state.metaData().findAliases(Strings.EMPTY_ARRAY, concreteIndices);
aliasesResult = state.metaData().findAllAliases(concreteIndices);
doneAliases = true;
}
break;

View File

@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.action.AliasesRequest;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterState.FeatureAware;
import org.elasticsearch.cluster.Diff;
@ -248,21 +249,53 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
}
/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
* Finds the specific index aliases that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliases The names of the index aliases to find
* @param concreteIndices The concrete indices the index aliases must point to in order to be returned.
* @return a map of index to a list of alias metadata; the list corresponding to a concrete index will be empty if no aliases are
* present for that index
*/
public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, String[] concreteIndices) {
public ImmutableOpenMap<String, List<AliasMetaData>> findAllAliases(String[] concreteIndices) {
return findAliases(Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, concreteIndices);
}
/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliasesRequest The request to find aliases for
* @param concreteIndices The concrete indices the index aliases must point to in order to be returned.
* @return a map of index to a list of alias metadata; the list corresponding to a concrete index will be empty if no aliases are
* present for that index
*/
public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final AliasesRequest aliasesRequest, String[] concreteIndices) {
return findAliases(aliasesRequest.getOriginalAliases(), aliasesRequest.aliases(), concreteIndices);
}
/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliases The aliases to look for
* @param originalAliases The aliases as originally requested by the user, before any resolution
* @param concreteIndices The concrete indices the index aliases must point to in order to be returned.
* @return a map of index to a list of alias metadata; the list corresponding to a concrete index will be empty if no aliases are
* present for that index
*/
private ImmutableOpenMap<String, List<AliasMetaData>> findAliases(String[] originalAliases, String[] aliases,
String[] concreteIndices) {
assert aliases != null;
assert originalAliases != null;
assert concreteIndices != null;
if (concreteIndices.length == 0) {
return ImmutableOpenMap.of();
}
// if aliases were requested but resolution replaced them with an empty array, return an empty map
if (originalAliases.length > 0 && aliases.length == 0) {
return ImmutableOpenMap.of();
}
boolean matchAllAliases = matchAllAliases(aliases);
ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder();
for (String index : concreteIndices) {
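The split yields three entry points with distinct semantics: findAllAliases returns every alias on the given indices, the AliasesRequest overload compares the original request against the resolved aliases, and the private worker short-circuits to an empty map when aliases were requested but resolution emptied them, instead of falling through to match-all behavior. A compact summary, mirroring the assertions in MetaDataTests further down:
metaData.findAllAliases(new String[]{"index"});                       // every alias of "index"
metaData.findAliases(new GetAliasesRequest(), new String[]{"index"}); // nothing requested -> all aliases
GetAliasesRequest request = new GetAliasesRequest("alias1");
request.replaceAliases(Strings.EMPTY_ARRAY);                          // resolution removed them all
metaData.findAliases(request, new String[]{"index"});                 // empty map, not match-all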

View File

@ -427,7 +427,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
// the master node restoring mappings from disk or data nodes
// deserializing cluster state that was sent by the master node,
// this check will be skipped.
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size());
checkTotalFieldsLimit(objectMappers.size() + fieldMappers.size() + fieldAliasMappers.size());
}
results.put(newMapper.type(), newMapper);

View File

@ -230,7 +230,9 @@ public class TypeParsers {
} else {
throw new MapperParsingException("no type specified for property [" + multiFieldName + "]");
}
if (type.equals(ObjectMapper.CONTENT_TYPE) || type.equals(ObjectMapper.NESTED_CONTENT_TYPE)) {
if (type.equals(ObjectMapper.CONTENT_TYPE)
|| type.equals(ObjectMapper.NESTED_CONTENT_TYPE)
|| type.equals(FieldAliasMapper.CONTENT_TYPE)) {
throw new MapperParsingException("Type [" + type + "] cannot be used in multi field");
}

View File

@ -37,6 +37,7 @@ import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
/**
* CircuitBreakerService that attempts to redistribute space between breakers
@ -215,7 +216,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
}
// Manually add the parent breaker settings since they aren't part of the breaker map
allStats.add(new CircuitBreakerStats(CircuitBreaker.PARENT, parentSettings.getLimit(),
parentUsed(0L), 1.0, parentTripCount.get()));
parentUsed(0L).totalUsage, 1.0, parentTripCount.get()));
return new AllCircuitBreakerStats(allStats.toArray(new CircuitBreakerStats[allStats.size()]));
}
@ -225,15 +226,26 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
return new CircuitBreakerStats(breaker.getName(), breaker.getLimit(), breaker.getUsed(), breaker.getOverhead(), breaker.getTrippedCount());
}
private long parentUsed(long newBytesReserved) {
private static class ParentMemoryUsage {
final long baseUsage;
final long totalUsage;
ParentMemoryUsage(final long baseUsage, final long totalUsage) {
this.baseUsage = baseUsage;
this.totalUsage = totalUsage;
}
}
private ParentMemoryUsage parentUsed(long newBytesReserved) {
if (this.trackRealMemoryUsage) {
return currentMemoryUsage() + newBytesReserved;
final long current = currentMemoryUsage();
return new ParentMemoryUsage(current, current + newBytesReserved);
} else {
long parentEstimated = 0;
for (CircuitBreaker breaker : this.breakers.values()) {
parentEstimated += breaker.getUsed() * breaker.getOverhead();
}
return parentEstimated;
return new ParentMemoryUsage(parentEstimated, parentEstimated);
}
}
@ -246,15 +258,37 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
* Checks whether the parent breaker has been tripped
*/
public void checkParentLimit(long newBytesReserved, String label) throws CircuitBreakingException {
long totalUsed = parentUsed(newBytesReserved);
final ParentMemoryUsage parentUsed = parentUsed(newBytesReserved);
long parentLimit = this.parentSettings.getLimit();
if (totalUsed > parentLimit) {
if (parentUsed.totalUsage > parentLimit) {
this.parentTripCount.incrementAndGet();
final String message = "[parent] Data too large, data for [" + label + "]" +
" would be [" + totalUsed + "/" + new ByteSizeValue(totalUsed) + "]" +
final StringBuilder message = new StringBuilder("[parent] Data too large, data for [" + label + "]" +
" would be [" + parentUsed.totalUsage + "/" + new ByteSizeValue(parentUsed.totalUsage) + "]" +
", which is larger than the limit of [" +
parentLimit + "/" + new ByteSizeValue(parentLimit) + "]";
throw new CircuitBreakingException(message, totalUsed, parentLimit);
parentLimit + "/" + new ByteSizeValue(parentLimit) + "]");
if (this.trackRealMemoryUsage) {
final long realUsage = parentUsed.baseUsage;
message.append(", real usage: [");
message.append(realUsage);
message.append("/");
message.append(new ByteSizeValue(realUsage));
message.append("], new bytes reserved: [");
message.append(newBytesReserved);
message.append("/");
message.append(new ByteSizeValue(newBytesReserved));
message.append("]");
} else {
message.append(", usages [");
message.append(String.join(", ",
this.breakers.entrySet().stream().map(e -> {
final CircuitBreaker breaker = e.getValue();
final long breakerUsed = (long)(breaker.getUsed() * breaker.getOverhead());
return e.getKey() + "=" + breakerUsed + "/" + new ByteSizeValue(breakerUsed);
})
.collect(Collectors.toList())));
message.append("]");
}
throw new CircuitBreakingException(message.toString(), parentUsed.totalUsage, parentLimit);
}
}
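The practical effect is a parent-breaker message that says where the memory went: with real-memory tracking it reports the measured usage plus the newly reserved bytes, otherwise it appends each child breaker's estimate. Illustrative messages (values invented; format follows the code above and the assertions in HierarchyCircuitBreakerServiceTests below):
[parent] Data too large, data for [request] would be [211288064/201.5mb], which is larger than the limit of [209715200/200mb], real usage: [209715200/200mb], new bytes reserved: [1572864/1.5mb]
[parent] Data too large, data for [request] would be [211288064/201.5mb], which is larger than the limit of [209715200/200mb], usages [request=157286400/150mb, fielddata=54001664/51.5mb, in_flight_requests=0/0b, accounting=0/0b]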

View File

@ -22,9 +22,7 @@ package org.elasticsearch.search.sort;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToChildBlockJoinQuery;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
@ -186,10 +184,21 @@ public abstract class SortBuilder<T extends SortBuilder<T>> implements NamedWrit
}
protected static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort) throws IOException {
return resolveNested(context, nestedSort, null);
final Query childQuery = resolveNestedQuery(context, nestedSort, null);
if (childQuery == null) {
return null;
}
final ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
final Query parentQuery;
if (objectMapper == null) {
parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated());
} else {
parentQuery = objectMapper.nestedTypeFilter();
}
return new Nested(context.bitsetFilter(parentQuery), childQuery);
}
private static Nested resolveNested(QueryShardContext context, NestedSortBuilder nestedSort, Nested nested) throws IOException {
private static Query resolveNestedQuery(QueryShardContext context, NestedSortBuilder nestedSort, Query parentQuery) throws IOException {
if (nestedSort == null || nestedSort.getPath() == null) {
return null;
}
@ -207,15 +216,7 @@ public abstract class SortBuilder<T extends SortBuilder<T>> implements NamedWrit
if (!nestedObjectMapper.nested().isNested()) {
throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type");
}
// get our parent query which determines our parent documents
Query parentQuery;
ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
if (objectMapper == null) {
parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated());
} else {
parentQuery = objectMapper.nestedTypeFilter();
}
// get our child query, potentially applying a user's filter
Query childQuery;
@ -223,7 +224,7 @@ public abstract class SortBuilder<T extends SortBuilder<T>> implements NamedWrit
context.nestedScope().nextLevel(nestedObjectMapper);
if (nestedFilter != null) {
assert nestedFilter == Rewriteable.rewrite(nestedFilter, context) : "nested filter is not rewritten";
if (nested == null) {
if (parentQuery == null) {
// this is for back-compat, original single level nested sorting never applied a nested type filter
childQuery = nestedFilter.toFilter(context);
} else {
@ -237,27 +238,23 @@ public abstract class SortBuilder<T extends SortBuilder<T>> implements NamedWrit
}
// apply filters from the previous nested level
if (nested != null) {
parentQuery = Queries.filtered(parentQuery,
new ToParentBlockJoinQuery(nested.getInnerQuery(), nested.getRootFilter(), ScoreMode.None));
if (parentQuery != null) {
if (objectMapper != null) {
childQuery = Queries.filtered(childQuery,
new ToChildBlockJoinQuery(nested.getInnerQuery(), context.bitsetFilter(objectMapper.nestedTypeFilter())));
new ToChildBlockJoinQuery(parentQuery, context.bitsetFilter(objectMapper.nestedTypeFilter())));
}
}
// wrap up our parent and child and either process the next level of nesting or return
final Nested innerNested = new Nested(context.bitsetFilter(parentQuery), childQuery);
if (nestedNestedSort != null) {
try {
context.nestedScope().nextLevel(nestedObjectMapper);
return resolveNested(context, nestedNestedSort, innerNested);
return resolveNestedQuery(context, nestedNestedSort, childQuery);
} finally {
context.nestedScope().previousLevel();
}
} else {
return innerNested;
return childQuery;
}
}
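The rewrite threads each level's child query into the next recursive call as parentQuery, so a filter applied at an outer nested level now constrains the inner levels through ToChildBlockJoinQuery instead of being dropped, which is the multi-level sort fix. The builder-side API is unchanged; a minimal sketch of a two-level sort with an outer filter, matching the tests further down:
FieldSortBuilder sort = new FieldSortBuilder("chapters.paragraphs.word_count");
sort.setNestedSort(
    new NestedSortBuilder("chapters")
        .setFilter(new RangeQueryBuilder("chapters.read_time_seconds").to(50L))
        .setNestedSort(new NestedSortBuilder("chapters.paragraphs")));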

View File

@ -20,6 +20,7 @@
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
@ -41,6 +42,7 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -50,6 +52,63 @@ import static org.hamcrest.Matchers.startsWith;
public class MetaDataTests extends ESTestCase {
public void testFindAliases() {
MetaData metaData = MetaData.builder().put(IndexMetaData.builder("index")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(AliasMetaData.builder("alias1").build())
.putAlias(AliasMetaData.builder("alias2").build())).build();
{
ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAliases(new GetAliasesRequest(), Strings.EMPTY_ARRAY);
assertThat(aliases.size(), equalTo(0));
}
{
ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAliases(new GetAliasesRequest(), new String[]{"index"});
assertThat(aliases.size(), equalTo(1));
List<AliasMetaData> aliasMetaDataList = aliases.get("index");
assertThat(aliasMetaDataList.size(), equalTo(2));
assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1"));
assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2"));
}
{
GetAliasesRequest getAliasesRequest = new GetAliasesRequest("alias1");
getAliasesRequest.replaceAliases(Strings.EMPTY_ARRAY);
ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAliases(getAliasesRequest, new String[]{"index"});
assertThat(aliases.size(), equalTo(0));
}
{
ImmutableOpenMap<String, List<AliasMetaData>> aliases =
metaData.findAliases(new GetAliasesRequest("alias*"), new String[]{"index"});
assertThat(aliases.size(), equalTo(1));
List<AliasMetaData> aliasMetaDataList = aliases.get("index");
assertThat(aliasMetaDataList.size(), equalTo(2));
assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1"));
assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2"));
}
{
ImmutableOpenMap<String, List<AliasMetaData>> aliases =
metaData.findAliases(new GetAliasesRequest("alias1"), new String[]{"index"});
assertThat(aliases.size(), equalTo(1));
List<AliasMetaData> aliasMetaDataList = aliases.get("index");
assertThat(aliasMetaDataList.size(), equalTo(1));
assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1"));
}
{
ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAllAliases(new String[]{"index"});
assertThat(aliases.size(), equalTo(1));
List<AliasMetaData> aliasMetaDataList = aliases.get("index");
assertThat(aliasMetaDataList.size(), equalTo(2));
assertThat(aliasMetaDataList.get(0).alias(), equalTo("alias1"));
assertThat(aliasMetaDataList.get(1).alias(), equalTo("alias2"));
}
{
ImmutableOpenMap<String, List<AliasMetaData>> aliases = metaData.findAllAliases(Strings.EMPTY_ARRAY);
assertThat(aliases.size(), equalTo(0));
}
}
public void testIndexAndAliasWithSameName() {
IndexMetaData.Builder builder = IndexMetaData.builder("index")
.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))

View File

@ -76,4 +76,28 @@ public class DocumentMapperParserTests extends ESSingleNodeTestCase {
mapperParser.parse("type", new CompressedXContent(mapping)));
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] of different type"));
}
public void testMultiFieldsWithFieldAlias() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "text")
.startObject("fields")
.startObject("alias")
.field("type", "alias")
.field("path", "other-field")
.endObject()
.endObject()
.endObject()
.startObject("other-field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapperParser.parse("type", new CompressedXContent(mapping)));
assertEquals("Type [alias] cannot be used in multi field", e.getMessage());
}
}

View File

@ -270,6 +270,37 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
assertThat(e.getMessage(), containsString("Invalid [path] value [nested.field] for field alias [alias]"));
}
public void testTotalFieldsLimitWithFieldAlias() throws Throwable {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("alias")
.field("type", "alias")
.field("path", "field")
.endObject()
.startObject("field")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper documentMapper = createIndex("test1").mapperService()
.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
// Set the total fields limit to the number of non-alias fields, to verify that adding
// a field alias pushes the mapping over the limit.
int numFields = documentMapper.mapping().metadataMappers.length + 2;
int numNonAliasFields = numFields - 1;
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
Settings settings = Settings.builder()
.put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), numNonAliasFields)
.build();
createIndex("test2", settings).mapperService()
.merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
});
assertEquals("Limit of total fields [" + numNonAliasFields + "] in index [test2] has been exceeded", e.getMessage());
}
public void testForbidMultipleTypes() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();

View File

@ -42,7 +42,6 @@ import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
@ -51,7 +50,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -74,7 +72,6 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQuery
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertDisjunctionSubQuery;
import static org.hamcrest.CoreMatchers.either;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
@ -381,11 +378,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
@Override
protected void doAssertLuceneQuery(QueryStringQueryBuilder queryBuilder,
Query query, SearchContext context) throws IOException {
assertThat(query, either(instanceOf(TermQuery.class))
.or(instanceOf(BooleanQuery.class)).or(instanceOf(DisjunctionMaxQuery.class))
.or(instanceOf(PhraseQuery.class)).or(instanceOf(BoostQuery.class))
.or(instanceOf(MultiPhrasePrefixQuery.class)).or(instanceOf(PrefixQuery.class)).or(instanceOf(SpanQuery.class))
.or(instanceOf(MatchNoDocsQuery.class)));
// nothing yet, put additional assertions here.
}
// Tests fix for https://github.com/elastic/elasticsearch/issues/29403

View File

@ -21,7 +21,11 @@ package org.elasticsearch.index.search.nested;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
@ -40,21 +44,37 @@ import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.fielddata.AbstractFieldDataTestCase;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.NoOrdinalsStringFieldDataTests;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.NestedQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.NestedSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME;
import static org.hamcrest.Matchers.equalTo;
public class NestedSortingTests extends AbstractFieldDataTestCase {
@ -343,4 +363,437 @@ public class NestedSortingTests extends AbstractFieldDataTestCase {
searcher.getIndexReader().close();
}
public void testMultiLevelNestedSorting() throws IOException {
XContentBuilder mapping = XContentFactory.jsonBuilder();
mapping.startObject();
{
mapping.startObject("_doc");
{
mapping.startObject("properties");
{
{
mapping.startObject("title");
mapping.field("type", "text");
mapping.endObject();
}
{
mapping.startObject("genre");
mapping.field("type", "keyword");
mapping.endObject();
}
{
mapping.startObject("chapters");
mapping.field("type", "nested");
{
mapping.startObject("properties");
{
mapping.startObject("title");
mapping.field("type", "text");
mapping.endObject();
}
{
mapping.startObject("read_time_seconds");
mapping.field("type", "integer");
mapping.endObject();
}
{
mapping.startObject("paragraphs");
mapping.field("type", "nested");
{
mapping.startObject("properties");
{
{
mapping.startObject("header");
mapping.field("type", "text");
mapping.endObject();
}
{
mapping.startObject("content");
mapping.field("type", "text");
mapping.endObject();
}
{
mapping.startObject("word_count");
mapping.field("type", "integer");
mapping.endObject();
}
}
mapping.endObject();
}
mapping.endObject();
}
mapping.endObject();
}
mapping.endObject();
}
}
mapping.endObject();
}
mapping.endObject();
}
mapping.endObject();
IndexService indexService = createIndex("nested_sorting", Settings.EMPTY, "_doc", mapping);
List<List<Document>> books = new ArrayList<>();
{
List<Document> book = new ArrayList<>();
Document document = new Document();
document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 743));
document.add(new IntPoint("chapters.paragraphs.word_count", 743));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "chapter 3", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 400));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 400));
book.add(document);
document = new Document();
document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 234));
document.add(new IntPoint("chapters.paragraphs.word_count", 234));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "chapter 2", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 200));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 200));
book.add(document);
document = new Document();
document.add(new TextField("chapters.paragraphs.header", "Paragraph 2", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 478));
document.add(new IntPoint("chapters.paragraphs.word_count", 478));
book.add(document);
document = new Document();
document.add(new TextField("chapters.paragraphs.header", "Paragraph 1", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 849));
document.add(new IntPoint("chapters.paragraphs.word_count", 849));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "chapter 1", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 1400));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 1400));
book.add(document);
document = new Document();
document.add(new StringField("genre", "science fiction", Field.Store.NO));
document.add(new StringField("_type", "_doc", Field.Store.NO));
document.add(new StringField("_id", "1", Field.Store.YES));
document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0));
book.add(document);
books.add(book);
}
{
List<Document> book = new ArrayList<>();
Document document = new Document();
document.add(new TextField("chapters.paragraphs.header", "Introduction", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 76));
document.add(new IntPoint("chapters.paragraphs.word_count", 76));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "chapter 1", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 20));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 20));
book.add(document);
document = new Document();
document.add(new StringField("genre", "romance", Field.Store.NO));
document.add(new StringField("_type", "_doc", Field.Store.NO));
document.add(new StringField("_id", "2", Field.Store.YES));
document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0));
book.add(document);
books.add(book);
}
{
List<Document> book = new ArrayList<>();
Document document = new Document();
document.add(new TextField("chapters.paragraphs.header", "A bad dream", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 976));
document.add(new IntPoint("chapters.paragraphs.word_count", 976));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "The beginning of the end", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 1200));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 1200));
book.add(document);
document = new Document();
document.add(new StringField("genre", "horror", Field.Store.NO));
document.add(new StringField("_type", "_doc", Field.Store.NO));
document.add(new StringField("_id", "3", Field.Store.YES));
document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0));
book.add(document);
books.add(book);
}
{
List<Document> book = new ArrayList<>();
Document document = new Document();
document.add(new TextField("chapters.paragraphs.header", "macaroni", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 180));
document.add(new IntPoint("chapters.paragraphs.word_count", 180));
book.add(document);
document = new Document();
document.add(new TextField("chapters.paragraphs.header", "hamburger", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 150));
document.add(new IntPoint("chapters.paragraphs.word_count", 150));
book.add(document);
document = new Document();
document.add(new TextField("chapters.paragraphs.header", "tosti", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 120));
document.add(new IntPoint("chapters.paragraphs.word_count", 120));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "easy meals", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 800));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 800));
book.add(document);
document = new Document();
document.add(new TextField("chapters.paragraphs.header", "introduction", Field.Store.NO));
document.add(new StringField("_type", "__chapters.paragraphs", Field.Store.NO));
document.add(new TextField("chapters.paragraphs.text", "some text...", Field.Store.NO));
document.add(new SortedNumericDocValuesField("chapters.paragraphs.word_count", 87));
document.add(new IntPoint("chapters.paragraphs.word_count", 87));
book.add(document);
document = new Document();
document.add(new TextField("chapters.title", "introduction", Field.Store.NO));
document.add(new StringField("_type", "__chapters", Field.Store.NO));
document.add(new IntPoint("chapters.read_time_seconds", 10));
document.add(new NumericDocValuesField("chapters.read_time_seconds", 10));
book.add(document);
document = new Document();
document.add(new StringField("genre", "cooking", Field.Store.NO));
document.add(new StringField("_type", "_doc", Field.Store.NO));
document.add(new StringField("_id", "4", Field.Store.YES));
document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0));
book.add(document);
books.add(book);
}
{
List<Document> book = new ArrayList<>();
Document document = new Document();
document.add(new StringField("genre", "unknown", Field.Store.NO));
document.add(new StringField("_type", "_doc", Field.Store.NO));
document.add(new StringField("_id", "5", Field.Store.YES));
document.add(new NumericDocValuesField(PRIMARY_TERM_NAME, 0));
book.add(document);
books.add(book);
}
Collections.shuffle(books, random());
for (List<Document> book : books) {
writer.addDocuments(book);
if (randomBoolean()) {
writer.commit();
}
}
DirectoryReader reader = DirectoryReader.open(writer);
reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId(indexService.index(), 0));
IndexSearcher searcher = new IndexSearcher(reader);
QueryShardContext queryShardContext = indexService.newQueryShardContext(0, reader, () -> 0L, null);
FieldSortBuilder sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count");
sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs")));
QueryBuilder queryBuilder = new MatchAllQueryBuilder();
TopFieldDocs topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(5L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L));
assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("1"));
assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(234L));
assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("3"));
assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(976L));
assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5"));
assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MAX_VALUE));
// Specific genre
{
queryBuilder = new TermQueryBuilder("genre", "romance");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
queryBuilder = new TermQueryBuilder("genre", "science fiction");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L));
queryBuilder = new TermQueryBuilder("genre", "horror");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L));
queryBuilder = new TermQueryBuilder("genre", "cooking");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
}
// reverse sort order
{
sortBuilder.order(SortOrder.DESC);
queryBuilder = new MatchAllQueryBuilder();
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(5L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L));
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1"));
assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(849L));
assertThat(searcher.doc(topFields.scoreDocs[2].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[2]).fields[0], equalTo(180L));
assertThat(searcher.doc(topFields.scoreDocs[3].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[3]).fields[0], equalTo(76L));
assertThat(searcher.doc(topFields.scoreDocs[4].doc).get("_id"), equalTo("5"));
assertThat(((FieldDoc) topFields.scoreDocs[4]).fields[0], equalTo(Long.MIN_VALUE));
}
// Specific genre and reverse sort order
{
queryBuilder = new TermQueryBuilder("genre", "romance");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
queryBuilder = new TermQueryBuilder("genre", "science fiction");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L));
queryBuilder = new TermQueryBuilder("genre", "horror");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L));
queryBuilder = new TermQueryBuilder("genre", "cooking");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L));
}
// Nested filter + query
{
queryBuilder = new RangeQueryBuilder("chapters.read_time_seconds").to(50L);
sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count");
sortBuilder.setNestedSort(
new NestedSortBuilder("chapters")
.setFilter(queryBuilder)
.setNestedSort(new NestedSortBuilder("chapters.paragraphs"))
);
topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(2L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(87L));
sortBuilder.order(SortOrder.DESC);
topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(2L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(76L));
}
// Multiple Nested filters + query
{
queryBuilder = new RangeQueryBuilder("chapters.read_time_seconds").to(50L);
sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count");
sortBuilder.setNestedSort(
new NestedSortBuilder("chapters")
.setFilter(queryBuilder)
.setNestedSort(
new NestedSortBuilder("chapters.paragraphs")
.setFilter(new RangeQueryBuilder("chapters.paragraphs.word_count").from(80L))
)
);
topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(2L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MAX_VALUE));
sortBuilder.order(SortOrder.DESC);
topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(2L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[1]).fields[0], equalTo(Long.MIN_VALUE));
}
// Nested filter + Specific genre
{
sortBuilder = new FieldSortBuilder("chapters.paragraphs.word_count");
sortBuilder.setNestedSort(
new NestedSortBuilder("chapters")
.setFilter(new RangeQueryBuilder("chapters.read_time_seconds").to(50L))
.setNestedSort(new NestedSortBuilder("chapters.paragraphs"))
);
queryBuilder = new TermQueryBuilder("genre", "romance");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
queryBuilder = new TermQueryBuilder("genre", "science fiction");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE));
queryBuilder = new TermQueryBuilder("genre", "horror");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE));
queryBuilder = new TermQueryBuilder("genre", "cooking");
topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
assertThat(topFields.totalHits, equalTo(1L));
assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
}
}
private static TopFieldDocs search(QueryBuilder queryBuilder, FieldSortBuilder sortBuilder, QueryShardContext queryShardContext,
IndexSearcher searcher) throws IOException {
Query query = new BooleanQuery.Builder()
.add(queryBuilder.toQuery(queryShardContext), Occur.MUST)
.add(Queries.newNonNestedFilter(Version.CURRENT), Occur.FILTER)
.build();
Sort sort = new Sort(sortBuilder.build(queryShardContext).field);
return searcher.search(query, 10, sort);
}
}

View File

@ -199,6 +199,8 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase {
.addEstimateBytesAndMaybeBreak(new ByteSizeValue(50, ByteSizeUnit.MB).getBytes(), "should break"));
assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [should break] would be"));
assertThat(exception.getMessage(), containsString("which is larger than the limit of [209715200/200mb]"));
assertThat(exception.getMessage(),
containsString("usages [request=157286400/150mb, fielddata=54001664/51.5mb, in_flight_requests=0/0b, accounting=0/0b]"));
}
}
@ -239,6 +241,9 @@ public class HierarchyCircuitBreakerServiceTests extends ESTestCase {
// it was the parent that rejected the reservation
assertThat(exception.getMessage(), containsString("[parent] Data too large, data for [request] would be"));
assertThat(exception.getMessage(), containsString("which is larger than the limit of [200/200b]"));
assertThat(exception.getMessage(),
containsString("real usage: [181/181b], new bytes reserved: [" + (reservationInBytes * 2) +
"/" + new ByteSizeValue(reservationInBytes * 2) + "]"));
assertEquals(0, requestBreaker.getTrippedCount());
assertEquals(1, service.stats().getStats(CircuitBreaker.PARENT).getTrippedCount());

View File

@ -117,6 +117,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
return ParsedAutoDateHistogram.class;
}
@Override
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215")
public void testReduceRandom() {
super.testReduceRandom();
}
@Override
protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) {
String name = instance.getName();

View File

@ -709,6 +709,79 @@ public class SimpleNestedIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("1")); // missing last
}
// https://github.com/elastic/elasticsearch/issues/31554
public void testLeakingSortValues() throws Exception {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder().put("number_of_shards", 1))
.addMapping("test-type", "{\n"
+ " \"dynamic\": \"strict\",\n"
+ " \"properties\": {\n"
+ " \"nested1\": {\n"
+ " \"type\": \"nested\",\n"
+ " \"properties\": {\n"
+ " \"nested2\": {\n"
+ " \"type\": \"nested\",\n"
+ " \"properties\": {\n"
+ " \"nested2_keyword\": {\n"
+ " \"type\": \"keyword\"\n"
+ " },\n"
+ " \"sortVal\": {\n"
+ " \"type\": \"integer\"\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ " }\n", XContentType.JSON));
ensureGreen();
client().prepareIndex("test", "test-type", "1").setSource("{\n"
+ " \"nested1\": [\n"
+ " {\n"
+ " \"nested2\": [\n"
+ " {\n"
+ " \"nested2_keyword\": \"nested2_bar\",\n"
+ " \"sortVal\": 1\n"
+ " }\n"
+ " ]\n"
+ " }\n"
+ " ]\n"
+ "}", XContentType.JSON).execute().actionGet();
client().prepareIndex("test", "test-type", "2").setSource("{\n"
+ " \"nested1\": [\n"
+ " {\n"
+ " \"nested2\": [\n"
+ " {\n"
+ " \"nested2_keyword\": \"nested2_bar\",\n"
+ " \"sortVal\": 2\n"
+ " }\n"
+ " ]\n"
+ " } \n"
+ " ]\n"
+ "}", XContentType.JSON).execute().actionGet();
refresh();
SearchResponse searchResponse = client().prepareSearch()
.setQuery(termQuery("_id", 2))
.addSort(
SortBuilders
.fieldSort("nested1.nested2.sortVal")
.setNestedSort(new NestedSortBuilder("nested1")
.setNestedSort(new NestedSortBuilder("nested1.nested2")
.setFilter(termQuery("nested1.nested2.nested2_keyword", "nested2_bar"))))
)
.execute().actionGet();
assertHitCount(searchResponse, 1);
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2"));
assertThat(searchResponse.getHits().getHits()[0].getSortValues()[0].toString(), equalTo("2"));
}
public void testSortNestedWithNestedFilter() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder()

View File

@ -32,8 +32,12 @@ import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.status.StatusConsoleListener;
import org.apache.logging.log4j.status.StatusData;
import org.apache.logging.log4j.status.StatusLogger;
@ -183,6 +187,8 @@ public abstract class ESTestCase extends LuceneTestCase {
private static final AtomicInteger portGenerator = new AtomicInteger();
private static final Collection<String> nettyLoggedLeaks = new ArrayList<>();
@AfterClass
public static void resetPortCounter() {
portGenerator.set(0);
@ -192,8 +198,28 @@ public abstract class ESTestCase extends LuceneTestCase {
System.setProperty("log4j.shutdownHookEnabled", "false");
System.setProperty("log4j2.disable.jmx", "true");
// Enable Netty leak detection and monitor logger for logged leak errors
System.setProperty("io.netty.leakDetection.level", "advanced");
String leakLoggerName = "io.netty.util.ResourceLeakDetector";
Logger leakLogger = LogManager.getLogger(leakLoggerName);
Appender leakAppender = new AbstractAppender(leakLoggerName, null,
PatternLayout.newBuilder().withPattern("%m").build()) {
@Override
public void append(LogEvent event) {
String message = event.getMessage().getFormattedMessage();
if (Level.ERROR.equals(event.getLevel()) && message.contains("LEAK:")) {
synchronized (nettyLoggedLeaks) {
nettyLoggedLeaks.add(message);
}
}
}
};
leakAppender.start();
Loggers.addAppender(leakLogger, leakAppender);
// shutdown hook so that when the test JVM exits, logging is shut down too
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
leakAppender.stop();
LoggerContext context = (LoggerContext) LogManager.getContext(false);
Configurator.shutdown(context);
}));
@ -440,6 +466,13 @@ public abstract class ESTestCase extends LuceneTestCase {
statusData.clear();
}
}
synchronized (nettyLoggedLeaks) {
try {
assertThat(nettyLoggedLeaks, empty());
} finally {
nettyLoggedLeaks.clear();
}
}
}
// this must be a separate method from other ensure checks above so suite scoped integ tests can call...TODO: fix that

View File

@ -19,8 +19,6 @@ with {security} enabled.
Elasticsearch clusters with {security} enabled apply the `/_all` wildcard, and
all other wildcards, to the indices that the current user has privileges for, not
the set of all indices on the cluster.
While creating or retrieving aliases by providing wildcard expressions for alias names, if there are no existing authorized aliases
that match the wildcard expression provided, an IndexNotFoundException is returned.
[float]
=== Multi Document APIs

View File

@ -20,7 +20,6 @@ esplugin {
}
dependencyLicenses {
mapping from: /bc.*/, to: 'bouncycastle'
mapping from: /http.*/, to: 'httpclient' // pulled in by rest client
mapping from: /commons-.*/, to: 'commons' // pulled in by rest client
}
@ -38,8 +37,6 @@ dependencies {
// security deps
compile 'com.unboundid:unboundid-ldapsdk:3.2.0'
compile 'org.bouncycastle:bcprov-jdk15on:1.59'
compile 'org.bouncycastle:bcpkix-jdk15on:1.59'
compile project(path: ':modules:transport-netty4', configuration: 'runtime')
testCompile 'org.elasticsearch:securemock:1.2'
@ -116,6 +113,7 @@ task testJar(type: Jar) {
appendix 'test'
from sourceSets.test.output
}
artifacts {
// normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions
archives jar

View File

@ -120,7 +120,8 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
this.scheduler = new SchedulerEngine(clock);
this.licenseState = licenseState;
this.operationModeFileWatcher = new OperationModeFileWatcher(resourceWatcherService,
XPackPlugin.resolveConfigFile(env, "license_mode"), logger, () -> updateLicenseState(getLicense()));
XPackPlugin.resolveConfigFile(env, "license_mode"), logger,
() -> updateLicenseState(getLicensesMetaData()));
this.scheduler.register(this);
populateExpirationCallbacks();
}
@ -265,11 +266,11 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
@Override
public void triggered(SchedulerEngine.Event event) {
final LicensesMetaData licensesMetaData = clusterService.state().metaData().custom(LicensesMetaData.TYPE);
final LicensesMetaData licensesMetaData = getLicensesMetaData();
if (licensesMetaData != null) {
final License license = licensesMetaData.getLicense();
if (event.getJobName().equals(LICENSE_JOB)) {
updateLicenseState(license);
updateLicenseState(license, licensesMetaData.getMostRecentTrialVersion());
} else if (event.getJobName().startsWith(ExpirationCallback.EXPIRATION_JOB_PREFIX)) {
expirationCallbacks.stream()
.filter(expirationCallback -> expirationCallback.getId().equals(event.getJobName()))
@ -311,6 +312,10 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
return license == LicensesMetaData.LICENSE_TOMBSTONE ? null : license;
}
private LicensesMetaData getLicensesMetaData() {
return this.clusterService.state().metaData().custom(LicensesMetaData.TYPE);
}
void startTrialLicense(PostStartTrialRequest request, final ActionListener<PostStartTrialResponse> listener) {
if (VALID_TRIAL_TYPES.contains(request.getType()) == false) {
throw new IllegalArgumentException("Cannot start trial of type [" + request.getType() + "]. Valid trial types are "
@ -422,10 +427,16 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
}
}
protected void updateLicenseState(final License license) {
private void updateLicenseState(LicensesMetaData licensesMetaData) {
if (licensesMetaData != null) {
updateLicenseState(getLicense(licensesMetaData), licensesMetaData.getMostRecentTrialVersion());
}
}
protected void updateLicenseState(final License license, Version mostRecentTrialVersion) {
if (license == LicensesMetaData.LICENSE_TOMBSTONE) {
// implies license has been explicitly deleted
licenseState.update(License.OperationMode.MISSING, false);
licenseState.update(License.OperationMode.MISSING, false, mostRecentTrialVersion);
return;
}
if (license != null) {
@ -438,7 +449,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
// date that is near Long.MAX_VALUE
active = time >= license.issueDate() && time - GRACE_PERIOD_DURATION.getMillis() < license.expiryDate();
}
licenseState.update(license.operationMode(), active);
licenseState.update(license.operationMode(), active, mostRecentTrialVersion);
if (active) {
if (time < license.expiryDate()) {
@ -480,7 +491,7 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste
logger.info("license [{}] mode [{}] - valid", license.uid(),
license.operationMode().name().toLowerCase(Locale.ROOT));
}
updateLicenseState(license);
updateLicenseState(license, currentLicensesMetaData.getMostRecentTrialVersion());
}
}
View File
@ -5,8 +5,11 @@
*/
package org.elasticsearch.license;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.License.OperationMode;
import org.elasticsearch.xpack.core.XPackField;
@ -266,6 +269,7 @@ public class XPackLicenseState {
private final List<Runnable> listeners = new CopyOnWriteArrayList<>();
private final boolean isSecurityEnabled;
private final boolean isSecurityExplicitlyEnabled;
private volatile boolean isSecurityEnabledByTrialVersion;
public XPackLicenseState(Settings settings) {
this.isSecurityEnabled = XPackSettings.SECURITY_ENABLED.get(settings);
@ -274,11 +278,30 @@ public class XPackLicenseState {
// setting is not explicitly set
this.isSecurityExplicitlyEnabled = isSecurityEnabled &&
(settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) || XPackSettings.TRANSPORT_SSL_ENABLED.get(settings));
this.isSecurityEnabledByTrialVersion = false;
}
/** Updates the current state of the license, which will change what features are available. */
void update(OperationMode mode, boolean active) {
/**
* Updates the current state of the license, which will change what features are available.
*
* @param mode The mode (type) of the current license.
* @param active True if the current license exists and is within its allowed usage period; false if it is expired or missing.
* @param mostRecentTrialVersion If this cluster has, at some point, commenced a trial, the most recent version on which it did so.
* May be {@code null} if a trial license has never been generated on this cluster, or if the most recent
* trial was prior to this metadata being tracked (6.1)
*/
void update(OperationMode mode, boolean active, @Nullable Version mostRecentTrialVersion) {
status = new Status(mode, active);
if (isSecurityEnabled == true && isSecurityExplicitlyEnabled == false && mode == OperationMode.TRIAL
&& isSecurityEnabledByTrialVersion == false) {
// Before 6.3, trial licenses would default to having security enabled.
// If this license was generated before that version, then treat it as if security is explicitly enabled
if (mostRecentTrialVersion == null || mostRecentTrialVersion.before(Version.V_6_3_0)) {
Loggers.getLogger(getClass()).info("Automatically enabling security for older trial license ({})",
mostRecentTrialVersion == null ? "[pre 6.1.0]" : mostRecentTrialVersion.toString());
isSecurityEnabledByTrialVersion = true;
}
}
listeners.forEach(Runnable::run);
}
@ -587,6 +610,6 @@ public class XPackLicenseState {
public boolean isSecurityEnabled() {
final OperationMode mode = status.mode;
return mode == OperationMode.TRIAL ? isSecurityExplicitlyEnabled : isSecurityEnabled;
return mode == OperationMode.TRIAL ? (isSecurityExplicitlyEnabled || isSecurityEnabledByTrialVersion) : isSecurityEnabled;
}
}
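For illustration, how the new tri-state plays out across updates; a sketch only, with assumed same-package access since update is package-private (the tests further down rely on the same access, and the values mirror testOldTrialDefaultsSecurityOn below):

// illustrative values; security is not explicitly configured in Settings.EMPTY
XPackLicenseState state = new XPackLicenseState(Settings.EMPTY);
state.update(License.OperationMode.TRIAL, true, Version.V_6_2_4); // trial begun before 6.3
assert state.isSecurityEnabled();  // turned on via isSecurityEnabledByTrialVersion
state.update(License.OperationMode.TRIAL, true, Version.V_6_3_0); // a later 6.3+ trial
assert state.isSecurityEnabled();  // the flag is never cleared, so security stays enabled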
View File
@ -63,7 +63,7 @@ public class CertParsingUtils {
return PathUtils.get(path).normalize();
}
static KeyStore readKeyStore(Path path, String type, char[] password)
public static KeyStore readKeyStore(Path path, String type, char[] password)
throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException {
try (InputStream in = Files.newInputStream(path)) {
KeyStore store = KeyStore.getInstance(type);
@ -108,7 +108,7 @@ public class CertParsingUtils {
return certificates.toArray(new X509Certificate[0]);
}
static List<Certificate> readCertificates(InputStream input) throws CertificateException, IOException {
public static List<Certificate> readCertificates(InputStream input) throws CertificateException, IOException {
CertificateFactory certFactory = CertificateFactory.getInstance("X.509");
Collection<Certificate> certificates = (Collection<Certificate>) certFactory.generateCertificates(input);
return new ArrayList<>(certificates);
@ -140,7 +140,7 @@ public class CertParsingUtils {
/**
* Creates a {@link KeyStore} from a PEM encoded certificate and key file
*/
static KeyStore getKeyStoreFromPEM(Path certificatePath, Path keyPath, char[] keyPassword)
public static KeyStore getKeyStoreFromPEM(Path certificatePath, Path keyPath, char[] keyPassword)
throws IOException, CertificateException, KeyStoreException, NoSuchAlgorithmException {
final PrivateKey key = PemUtils.readPrivateKey(keyPath, () -> keyPassword);
final Certificate[] certificates = readCertificates(Collections.singletonList(certificatePath));
@ -168,7 +168,7 @@ public class CertParsingUtils {
/**
* Returns a {@link X509ExtendedKeyManager} that is built from the provided keystore
*/
static X509ExtendedKeyManager keyManager(KeyStore keyStore, char[] password, String algorithm)
public static X509ExtendedKeyManager keyManager(KeyStore keyStore, char[] password, String algorithm)
throws NoSuchAlgorithmException, UnrecoverableKeyException, KeyStoreException {
KeyManagerFactory kmf = KeyManagerFactory.getInstance(algorithm);
kmf.init(keyStore, password);
@ -271,7 +271,7 @@ public class CertParsingUtils {
/**
* Creates a {@link X509ExtendedTrustManager} based on the trust material in the provided {@link KeyStore}
*/
static X509ExtendedTrustManager trustManager(KeyStore keyStore, String algorithm)
public static X509ExtendedTrustManager trustManager(KeyStore keyStore, String algorithm)
throws NoSuchAlgorithmException, KeyStoreException {
TrustManagerFactory tmf = TrustManagerFactory.getInstance(algorithm);
tmf.init(keyStore);
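These methods are widened from package-private to public so that the security CLI, relocated below into org.elasticsearch.xpack.security.cli, can still reach them. A minimal usage sketch; the class name, paths, and password are purely illustrative:

import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyStore;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509ExtendedKeyManager;
import javax.net.ssl.X509ExtendedTrustManager;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;

public class PemKeyStoreDemo {
    public static void main(String[] args) throws Exception {
        Path cert = Paths.get("config/node.crt");        // illustrative paths
        Path key = Paths.get("config/node.key");
        char[] password = "changeit".toCharArray();

        // build an in-memory keystore from PEM material, then derive managers
        KeyStore store = CertParsingUtils.getKeyStoreFromPEM(cert, key, password);
        X509ExtendedKeyManager km =
            CertParsingUtils.keyManager(store, password, KeyManagerFactory.getDefaultAlgorithm());
        X509ExtendedTrustManager tm =
            CertParsingUtils.trustManager(store, TrustManagerFactory.getDefaultAlgorithm());
        System.out.println(km + " / " + tm);
    }
}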
View File
@ -6,6 +6,7 @@
package org.elasticsearch.license;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Strings;
@ -353,20 +354,22 @@ public class TestUtils {
public static class AssertingLicenseState extends XPackLicenseState {
public final List<License.OperationMode> modeUpdates = new ArrayList<>();
public final List<Boolean> activeUpdates = new ArrayList<>();
public final List<Version> trialVersionUpdates = new ArrayList<>();
public AssertingLicenseState() {
super(Settings.EMPTY);
}
@Override
void update(License.OperationMode mode, boolean active) {
void update(License.OperationMode mode, boolean active, Version mostRecentTrialVersion) {
modeUpdates.add(mode);
activeUpdates.add(active);
trialVersionUpdates.add(mostRecentTrialVersion);
}
}
/**
* A license state that makes the {@link #update(License.OperationMode, boolean)}
* A license state that makes the {@link #update(License.OperationMode, boolean, Version)}
* method public for use in tests.
*/
public static class UpdatableLicenseState extends XPackLicenseState {
@ -379,8 +382,8 @@ public class TestUtils {
}
@Override
public void update(License.OperationMode mode, boolean active) {
super.update(mode, active);
public void update(License.OperationMode mode, boolean active, Version mostRecentTrialVersion) {
super.update(mode, active, mostRecentTrialVersion);
}
}
View File
@ -5,9 +5,11 @@
*/
package org.elasticsearch.license;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.License.OperationMode;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackSettings;
@ -31,7 +33,7 @@ public class XPackLicenseStateTests extends ESTestCase {
/** Creates a license state with the given license type and active state, and checks the given method returns expected. */
void assertAllowed(OperationMode mode, boolean active, Predicate<XPackLicenseState> predicate, boolean expected) {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(mode, active);
licenseState.update(mode, active, null);
assertEquals(expected, predicate.test(licenseState));
}
@ -102,7 +104,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityBasic() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(BASIC, true);
licenseState.update(BASIC, true, null);
assertThat(licenseState.isAuthAllowed(), is(false));
assertThat(licenseState.isIpFilteringAllowed(), is(false));
@ -116,7 +118,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityBasicExpired() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(BASIC, false);
licenseState.update(BASIC, false, null);
assertThat(licenseState.isAuthAllowed(), is(false));
assertThat(licenseState.isIpFilteringAllowed(), is(false));
@ -130,7 +132,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityStandard() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(STANDARD, true);
licenseState.update(STANDARD, true, null);
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(false));
@ -144,7 +146,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityStandardExpired() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(STANDARD, false);
licenseState.update(STANDARD, false, null);
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(false));
@ -158,7 +160,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityGold() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(GOLD, true);
licenseState.update(GOLD, true, null);
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(true));
@ -172,7 +174,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityGoldExpired() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(GOLD, false);
licenseState.update(GOLD, false, null);
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(true));
@ -186,7 +188,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityPlatinum() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(PLATINUM, true);
licenseState.update(PLATINUM, true, null);
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(true));
@ -200,7 +202,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSecurityPlatinumExpired() {
XPackLicenseState licenseState = new XPackLicenseState(randomFrom(Settings.EMPTY,
Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build()));
licenseState.update(PLATINUM, false);
licenseState.update(PLATINUM, false, null);
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(true));
@ -211,6 +213,34 @@ public class XPackLicenseStateTests extends ESTestCase {
assertThat(licenseState.isCustomRoleProvidersAllowed(), is(false));
}
public void testNewTrialDefaultsSecurityOff() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(TRIAL, true, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT));
assertThat(licenseState.isSecurityEnabled(), is(false));
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(true));
assertThat(licenseState.isAuditingAllowed(), is(true));
assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true));
assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL));
assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true));
}
public void testOldTrialDefaultsSecurityOn() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(TRIAL, true, rarely() ? null : VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_2_4));
assertThat(licenseState.isSecurityEnabled(), is(true));
assertThat(licenseState.isAuthAllowed(), is(true));
assertThat(licenseState.isIpFilteringAllowed(), is(true));
assertThat(licenseState.isAuditingAllowed(), is(true));
assertThat(licenseState.isStatsAndHealthAllowed(), is(true));
assertThat(licenseState.isDocumentAndFieldLevelSecurityAllowed(), is(true));
assertThat(licenseState.allowedRealmType(), is(XPackLicenseState.AllowedRealmType.ALL));
assertThat(licenseState.isCustomRoleProvidersAllowed(), is(true));
}
public void testSecurityAckBasicToNotGoldOrStandard() {
OperationMode toMode = randomFrom(OperationMode.values(), mode -> mode != GOLD && mode != STANDARD);
assertAckMesssages(XPackField.SECURITY, BASIC, toMode, 0);
@ -354,7 +384,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlBasic() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(BASIC, true);
licenseState.update(BASIC, true, null);
assertThat(licenseState.isSqlAllowed(), is(true));
assertThat(licenseState.isJdbcAllowed(), is(false));
@ -362,7 +392,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlBasicExpired() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(BASIC, false);
licenseState.update(BASIC, false, null);
assertThat(licenseState.isSqlAllowed(), is(false));
assertThat(licenseState.isJdbcAllowed(), is(false));
@ -370,7 +400,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlStandard() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(STANDARD, true);
licenseState.update(STANDARD, true, null);
assertThat(licenseState.isSqlAllowed(), is(true));
assertThat(licenseState.isJdbcAllowed(), is(false));
@ -378,7 +408,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlStandardExpired() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(STANDARD, false);
licenseState.update(STANDARD, false, null);
assertThat(licenseState.isSqlAllowed(), is(false));
assertThat(licenseState.isJdbcAllowed(), is(false));
@ -386,7 +416,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlGold() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(GOLD, true);
licenseState.update(GOLD, true, null);
assertThat(licenseState.isSqlAllowed(), is(true));
assertThat(licenseState.isJdbcAllowed(), is(false));
@ -394,7 +424,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlGoldExpired() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(GOLD, false);
licenseState.update(GOLD, false, null);
assertThat(licenseState.isSqlAllowed(), is(false));
assertThat(licenseState.isJdbcAllowed(), is(false));
@ -402,7 +432,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlPlatinum() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(PLATINUM, true);
licenseState.update(PLATINUM, true, null);
assertThat(licenseState.isSqlAllowed(), is(true));
assertThat(licenseState.isJdbcAllowed(), is(true));
@ -410,7 +440,7 @@ public class XPackLicenseStateTests extends ESTestCase {
public void testSqlPlatinumExpired() {
XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY);
licenseState.update(PLATINUM, false);
licenseState.update(PLATINUM, false, null);
assertThat(licenseState.isSqlAllowed(), is(false));
assertThat(licenseState.isJdbcAllowed(), is(false));
View File
@ -635,11 +635,7 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
// TODO review these settings
.settings(Settings.builder()
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting)
// Sacrifice durability for performance: in the event of power
// failure we can lose the last 5 seconds of changes, but it's
// much faster
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async"))
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), delayedNodeTimeOutSetting))
.putMapping(ElasticsearchMappings.DOC_TYPE, Strings.toString(stateMapping))
.version(Version.CURRENT.id)
.build();
View File
@ -551,7 +551,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase {
public static void disableLicensing(License.OperationMode operationMode) {
for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) {
licenseState.update(operationMode, false);
licenseState.update(operationMode, false, null);
}
}
@ -561,7 +561,7 @@ public class MachineLearningLicensingTests extends BaseMlIntegTestCase {
public static void enableLicensing(License.OperationMode operationMode) {
for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) {
licenseState.update(operationMode, true);
licenseState.update(operationMode, true, null);
}
}
}
View File
@ -22,8 +22,8 @@ dependencies {
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
compile 'com.unboundid:unboundid-ldapsdk:3.2.0'
compile 'org.bouncycastle:bcprov-jdk15on:1.59'
compile 'org.bouncycastle:bcpkix-jdk15on:1.59'
compileOnly 'org.bouncycastle:bcprov-jdk15on:1.59'
compileOnly 'org.bouncycastle:bcpkix-jdk15on:1.59'
// the following are all SAML dependencies - might as well download the whole internet
compile "org.opensaml:opensaml-core:3.3.0"
@ -79,7 +79,6 @@ sourceSets.test.resources {
srcDir '../core/src/test/resources'
}
dependencyLicenses {
mapping from: /bc.*/, to: 'bouncycastle'
mapping from: /java-support|opensaml-.*/, to: 'shibboleth'
mapping from: /http.*/, to: 'httpclient'
}
View File
@ -0,0 +1,20 @@
apply plugin: 'elasticsearch.build'
archivesBaseName = 'elasticsearch-security-cli'
dependencies {
compileOnly "org.elasticsearch:elasticsearch:${version}"
compileOnly xpackProject('plugin:core')
compile 'org.bouncycastle:bcprov-jdk15on:1.59'
compile 'org.bouncycastle:bcpkix-jdk15on:1.59'
testImplementation 'com.google.jimfs:jimfs:1.1'
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
testCompile 'org.elasticsearch:securemock:1.2'
testCompile "org.elasticsearch.test:framework:${version}"
testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
}
dependencyLicenses {
mapping from: /bc.*/, to: 'bouncycastle'
}
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
package org.elasticsearch.xpack.security.cli;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
package org.elasticsearch.xpack.security.cli;
import joptsimple.ArgumentAcceptingOptionSpec;
import joptsimple.OptionSet;
@ -34,6 +34,8 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;
import javax.security.auth.x500.X500Principal;
@ -68,6 +70,7 @@ import java.util.zip.ZipOutputStream;
/**
* CLI tool to make generation of certificates or certificate requests easier for users
*
* @deprecated Replaced by {@link CertificateTool}
*/
@Deprecated
@ -223,6 +226,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* This method handles the collection of information about each instance that is necessary to generate a certificate. The user may
* be prompted or the information can be gathered from a file
*
* @param terminal the terminal to use for user interaction
* @param inputFile an optional file that will be used to load the instance information
* @return a {@link Collection} of {@link CertificateInformation} that represents each instance
@ -298,6 +302,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* Parses the input file to retrieve the certificate information
*
* @param file the file to parse
* @return a collection of certificate information
*/
@ -312,6 +317,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* Generates certificate signing requests and writes them out to the specified file in zip format
*
* @param outputFile the file to write the output to. This file must not already exist
* @param certInfo the details to use in the certificate signing requests
*/
@ -388,6 +394,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* Generates signed certificates in PEM format stored in a zip file
*
* @param outputFile the file that the certificates will be written to. This file must not exist
* @param certificateInformations details for creation of the certificates
* @param caInfo the CA information to sign the certificates with
@ -441,6 +448,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* This method handles the deletion of a file in the case of a partial write
*
* @param file the file that is being written to
* @param writer writes the contents of the file
*/
@ -468,6 +476,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* This method handles writing out the certificate authority cert and private key if the certificate authority was generated by
* this invocation of the tool
*
* @param outputStream the output stream to write to
* @param pemWriter the writer for PEM objects
* @param info the certificate authority information
@ -577,6 +586,7 @@ public class CertificateGenerateTool extends EnvironmentAwareCommand {
/**
* Helper method to read a private key and support prompting of user for a key. To avoid passwords being placed as an argument we
* can prompt the user for their password if we encounter an encrypted key.
*
* @param path the path to the private key
* @param password the password provided by the user or {@code null}
* @param terminal the terminal to use for user interaction
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
package org.elasticsearch.xpack.security.cli;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
@ -39,6 +39,8 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;
import javax.security.auth.x500.X500Principal;
View File
@ -4,7 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
package org.elasticsearch.xpack.security.cli;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
@ -12,6 +12,7 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.cli.CertGenUtils;
import org.junit.BeforeClass;
import java.math.BigInteger;
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
package org.elasticsearch.xpack.security.cli;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
@ -33,9 +33,11 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CAInfo;
import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.CertificateInformation;
import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.Name;
import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.CAInfo;
import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.CertificateInformation;
import org.elasticsearch.xpack.security.cli.CertificateGenerateTool.Name;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.BeforeClass;
@ -359,8 +361,8 @@ public class CertificateGenerateToolTests extends ESTestCase {
public void testGetCAInfo() throws Exception {
Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt");
Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem");
final boolean passwordPrompt = randomBoolean();
MockTerminal terminal = new MockTerminal();
if (passwordPrompt) {
View File
@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
package org.elasticsearch.xpack.security.cli;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
@ -39,12 +39,14 @@ import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.test.TestMatchers;
import org.elasticsearch.xpack.core.ssl.CertificateTool.CAInfo;
import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateAuthorityCommand;
import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateCommand;
import org.elasticsearch.xpack.core.ssl.CertificateTool.CertificateInformation;
import org.elasticsearch.xpack.core.ssl.CertificateTool.GenerateCertificateCommand;
import org.elasticsearch.xpack.core.ssl.CertificateTool.Name;
import org.elasticsearch.xpack.security.cli.CertificateTool.CAInfo;
import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateAuthorityCommand;
import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateCommand;
import org.elasticsearch.xpack.security.cli.CertificateTool.CertificateInformation;
import org.elasticsearch.xpack.security.cli.CertificateTool.GenerateCertificateCommand;
import org.elasticsearch.xpack.security.cli.CertificateTool.Name;
import org.elasticsearch.xpack.core.ssl.CertParsingUtils;
import org.elasticsearch.xpack.core.ssl.PemUtils;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.BeforeClass;
@ -387,8 +389,8 @@ public class CertificateToolTests extends ESTestCase {
public void testGetCAInfo() throws Exception {
Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
Path testNodeCertPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.crt");
Path testNodeKeyPath = getDataPath("/org/elasticsearch/xpack/security/cli/testnode.pem");
final boolean passwordPrompt = randomBoolean();
MockTerminal terminal = new MockTerminal();
if (passwordPrompt) {
View File
@ -0,0 +1,30 @@
-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: DES-EDE3-CBC,9D867F7E0C94D013
dVoVCjPeg1wgS7rVtOvGfQcrZyLkx393aWRnFq45tbjKBVuITtJ9vI7o4QXOV/15
Gnb6WhXGIdWrzsxEAd46K6hIuNSISd4Emsx6c2Q5hTqWXXfexbOZBNfTtXtdJPnJ
1jAaikhtztLo3JSLTKNY5sNxd+XbaQyYVUWvueK6zOaIIMETvB+VPVFd9i1ROibk
Sgdtyj01KjkoalifqK/tA0CIYNKL0S6/eoK3UhAlpIprlpV+cnXa940C6bjLeJPt
PMAGGp5RrplxSgrSerw3I9DOWkHGtpqzIka3XneNUXJP8k4HUJ+aZkGH2ZILKS8d
4KMIb+KZSpHEGn+6uGccWLtZZmAjWJrDw56JbQtSHdRYLBRSOjLbTvQoPu/2Hpli
7HOxbotlvjptMunncq5aqK57SHA1dh0cwF7J3LUmGFJ67eoz+VV3b5qMn4MopSeI
mS16Ydd3nGpjSrln/elM0CQxqWfcOAXRZpDpFUQoXcBrLVzvz2DBl/0CrTRLhgzi
CO+5/IVcBWRlYpRNGgjjP7q0j6URID3jk5J06fYQXmBiwQT5j+GZqqzpMCJ9mIy2
1O9SN1hebJnIcEU+E0njn/MGjlYdPywhaCy8pqElp6Q8TUEJpwLRFO/owCoBet/n
ZmCXUjfCGhc1pWHufFcDEQ6xMgEWWY/tdwCZeSU7EhErTjCbfupg+55A5fpDml0m
3wH4CFcuRjlqyx6Ywixm1ATeitDtJl5HQTw6b8OtEXwSgRmZ0eSqSRVk9QbVS7gu
IpQe09/Zimb5HzjZqZ3fdqHlcW4xax8hyJeyIvF5ZJ57eY8CBvu/wP2GDn26QnvF
xQqdfDbq1H4JmpwUHpbFwBoQK4Q6WFd1z4EA9bRQeo3H9PoqoOwMDjzajwLRF7b7
q6tYH/n9PyHwdf1c4fFwgSmL1toXGfKlA9hjIaLsRSDD6srT5EdUk78bsnddwI51
tu7C7P4JG+h1VdRNMNTlqtileWsIE7Nn2A1OkcUxZdF5mamENpDpJcHePLto6c8q
FKiwyFMsxhgsj6HK2HqO+UA4sX5Ni4oHwiPmb//EZLn045M5i1AN26KosJmb8++D
sgR5reWRy+UqJCTYblVg+7Dx++ggUnfxVyQEsWmw5r5f4KU5wXBkvoVMGtPNa9DE
n/uLtObD1qkNL38pRsr2OGRchYCgEoKGqEISBP4knfGXLOlWiW/246j9QzI97r1u
tvy7fKg28G7AUz9l6bpewsPHefBUeRQeieP9eJINaEpxkF/w2RpKDLpQjWxwDDOM
s+D0mrBMJve17AmJ8rMw6dIQPZYNZ88/jz1uQuUwQ2YlbmtZbCG81k9YMFGEU9XS
cyhJxj8hvYnt2PR5Z9/cJPyWOs0m/ufOeeQQ8SnU/lzmrQnpzUd2Z6p5i/B7LdRP
n1kX+l1qynuPnjvBz4nJQE0p6nzW8RyCDSniC9mtYtZmhgC8icqxgbvS7uEOBIYJ
NbK+0bEETTO34iY/JVTIqLOw3iQZYMeUpxpj6Phgx/oooxMTquMecPKNgeVtaBst
qjTNPX0ti1/HYpZqzYi8SV8YjHSJWCVMsZjKPr3W/HIcCKqYoIfgzi83Ha2KMQx6
-----END RSA PRIVATE KEY-----
View File
@ -4,7 +4,8 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateGenerateTool \
ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateGenerateTool \
ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \
"`dirname "$0"`"/elasticsearch-cli \
"$@"
View File
@ -4,7 +4,8 @@
# or more contributor license agreements. Licensed under the Elastic License;
# you may not use this file except in compliance with the Elastic License.
ES_MAIN_CLASS=org.elasticsearch.xpack.core.ssl.CertificateTool \
ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.CertificateTool \
ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \
"`dirname "$0"`"/elasticsearch-cli \
"$@"
View File
@ -20,7 +20,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
@ -200,6 +199,8 @@ class IndicesAndAliasesResolver {
if (aliasesRequest.expandAliasesWildcards()) {
List<String> aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(),
loadAuthorizedAliases(authorizedIndices.get(), metaData));
//the requested aliases may be replaced with an empty array if there are no authorized aliases for the action.
//MetaData#findAliases returns nothing when aliases were originally requested but were replaced with an empty array.
aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()]));
}
if (indicesReplacedWithNoIndices) {
@ -240,8 +241,7 @@ class IndicesAndAliasesResolver {
} else {
// the user is not authorized to put mappings for this index, but could have been
// authorized for a write using an alias that triggered a dynamic mapping update
ImmutableOpenMap<String, List<AliasMetaData>> foundAliases =
metaData.findAliases(Strings.EMPTY_ARRAY, new String[] { concreteIndexName });
ImmutableOpenMap<String, List<AliasMetaData>> foundAliases = metaData.findAllAliases(new String[] { concreteIndexName });
List<AliasMetaData> aliasMetaData = foundAliases.get(concreteIndexName);
if (aliasMetaData != null) {
Optional<String> foundAlias = aliasMetaData.stream()
@ -279,14 +279,12 @@ class IndicesAndAliasesResolver {
List<String> finalAliases = new ArrayList<>();
//IndicesAliasesRequest doesn't support empty aliases (validation fails) but GetAliasesRequest does (in which case empty means _all)
boolean matchAllAliases = aliases.length == 0;
if (matchAllAliases) {
if (aliases.length == 0) {
finalAliases.addAll(authorizedAliases);
}
for (String aliasPattern : aliases) {
if (aliasPattern.equals(MetaData.ALL)) {
matchAllAliases = true;
finalAliases.addAll(authorizedAliases);
} else if (Regex.isSimpleMatchPattern(aliasPattern)) {
for (String authorizedAlias : authorizedAliases) {
@ -298,16 +296,6 @@ class IndicesAndAliasesResolver {
finalAliases.add(aliasPattern);
}
}
//Throw an exception if the wildcard expansion to authorized aliases resulted in no aliases.
//We always need to replace wildcards for security reasons, to make sure that the operation is executed on the aliases that we
//authorized it to execute on. An empty set gets converted to _all by es core though, and unlike with indices, here we have no
//special expression to replace the empty set with that would guarantee nothing is returned.
//This is because existing aliases can contain all kinds of special characters; they are only validated since 5.1.
if (finalAliases.isEmpty()) {
String indexName = matchAllAliases ? MetaData.ALL : Arrays.toString(aliases);
throw new IndexNotFoundException(indexName);
}
return finalAliases;
}
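A self-contained sketch of the resolution behaviour after this change; names and inputs are illustrative, and the prefix matching is a simplified stand-in for Regex.simpleMatch:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class AliasWildcardDemo {
    // mirrors replaceWildcardsWithAuthorizedAliases: wildcards expand only to
    // authorized aliases, and an empty result is now passed through instead of
    // raising IndexNotFoundException
    static List<String> replace(String[] requested, List<String> authorized) {
        List<String> finalAliases = new ArrayList<>();
        if (requested.length == 0) {            // empty means _all for GetAliasesRequest
            finalAliases.addAll(authorized);
        }
        for (String pattern : requested) {
            if (pattern.equals("_all")) {
                finalAliases.addAll(authorized);
            } else if (pattern.contains("*")) { // simplified wildcard handling
                String prefix = pattern.substring(0, pattern.indexOf('*'));
                for (String alias : authorized) {
                    if (alias.startsWith(prefix)) {
                        finalAliases.add(alias);
                    }
                }
            } else {
                finalAliases.add(pattern);      // non-wildcard names pass through as-is
            }
        }
        return finalAliases;
    }

    public static void main(String[] args) {
        List<String> authorized = Arrays.asList("logs-read", "metrics-read");
        System.out.println(replace(new String[]{"log*"}, authorized)); // [logs-read]
        System.out.println(replace(new String[]{"foo*"}, authorized)); // [] - no exception now
    }
}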
View File
@ -307,7 +307,7 @@ public class LicensingTests extends SecurityIntegTestCase {
public static void disableLicensing(License.OperationMode operationMode) {
for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) {
licenseState.update(operationMode, false);
licenseState.update(operationMode, false, null);
}
}
@ -317,7 +317,7 @@ public class LicensingTests extends SecurityIntegTestCase {
public static void enableLicensing(License.OperationMode operationMode) {
for (XPackLicenseState licenseState : internalCluster().getInstances(XPackLicenseState.class)) {
licenseState.update(operationMode, true);
licenseState.update(operationMode, true, null);
}
}
View File

@ -399,7 +399,8 @@ public class SecurityTests extends ESTestCase {
createComponents(Settings.EMPTY);
Function<String, Predicate<String>> fieldFilter = security.getFieldFilter();
assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter);
licenseState.update(randomFrom(License.OperationMode.BASIC, License.OperationMode.STANDARD, License.OperationMode.GOLD), true);
licenseState.update(
randomFrom(License.OperationMode.BASIC, License.OperationMode.STANDARD, License.OperationMode.GOLD), true, null);
assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter);
assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 6)));
}
Some files were not shown because too many files have changed in this diff.