diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties
index 9202be97bd9..c902a692071 100644
--- a/.ci/java-versions.properties
+++ b/.ci/java-versions.properties
@@ -6,3 +6,5 @@
ES_BUILD_JAVA=java11
ES_RUNTIME_JAVA=java8
+GRADLE_TASK=build
+
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 2ff4b3e1d34..bfe5d9504c0 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -7,8 +7,8 @@
-
-
+
+
@@ -42,7 +42,7 @@
-
+
@@ -154,7 +154,6 @@
-
@@ -182,7 +181,6 @@
-
@@ -237,7 +235,6 @@
-
@@ -259,7 +256,6 @@
-
@@ -267,7 +263,6 @@
-
@@ -283,13 +278,10 @@
-
-
-
@@ -307,18 +299,15 @@
-
-
-
@@ -326,7 +315,6 @@
-
@@ -360,15 +348,11 @@
-
-
-
-
@@ -380,12 +364,10 @@
-
-
@@ -435,13 +417,10 @@
-
-
-
@@ -550,18 +529,15 @@
-
-
-
@@ -572,11 +548,9 @@
-
-
@@ -597,7 +571,6 @@
-
@@ -637,9 +610,6 @@
-
-
-
@@ -650,12 +620,10 @@
-
-
@@ -667,7 +635,6 @@
-
@@ -684,7 +651,6 @@
-
@@ -716,10 +682,6 @@
-
-
-
-
diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index bfe7c3d956c..1a41a493586 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -85,6 +85,7 @@ integTestRunner {
}
integTestCluster {
+ systemProperty 'es.scripting.update.ctx_in_params', 'false'
setting 'xpack.license.self_generated.type', 'trial'
setting 'xpack.security.enabled', 'true'
// Truststore settings are not used since TLS is not enabled. Included for testing the get certificates API
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index 79d2ccacf61..4e3f778cd15 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -280,7 +280,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
.startObject()
.startObject("script")
.field("lang", "painless")
- .field("code", "ctx._source.field += params.count")
+ .field("source", "ctx._source.field += params.count")
.endObject()
.endObject()));
Response response = client().performRequest(request);
@@ -991,10 +991,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::update-by-query-request-conflicts
request.setConflicts("proceed"); // <1>
// end::update-by-query-request-conflicts
- // tag::update-by-query-request-typeOrQuery
- request.setDocTypes("doc"); // <1>
- request.setQuery(new TermQueryBuilder("user", "kimchy")); // <2>
- // end::update-by-query-request-typeOrQuery
+ // tag::update-by-query-request-query
+ request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1>
+ // end::update-by-query-request-query
// tag::update-by-query-request-size
request.setSize(10); // <1>
// end::update-by-query-request-size
@@ -1110,10 +1109,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::delete-by-query-request-conflicts
request.setConflicts("proceed"); // <1>
// end::delete-by-query-request-conflicts
- // tag::delete-by-query-request-typeOrQuery
- request.setDocTypes("doc"); // <1>
- request.setQuery(new TermQueryBuilder("user", "kimchy")); // <2>
- // end::delete-by-query-request-typeOrQuery
+ // tag::delete-by-query-request-query
+ request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1>
+ // end::delete-by-query-request-query
// tag::delete-by-query-request-size
request.setSize(10); // <1>
// end::delete-by-query-request-size
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
index 4382924bb97..1e596158750 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
@@ -140,10 +140,9 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
// end::search-request-basic
}
{
- // tag::search-request-indices-types
+ // tag::search-request-indices
SearchRequest searchRequest = new SearchRequest("posts"); // <1>
- searchRequest.types("doc"); // <2>
- // end::search-request-indices-types
+ // end::search-request-indices
// tag::search-request-routing
searchRequest.routing("routing"); // <1>
// end::search-request-routing
diff --git a/docs/java-rest/high-level/document/delete-by-query.asciidoc b/docs/java-rest/high-level/document/delete-by-query.asciidoc
index 5ec246a9121..3a4c8a15dea 100644
--- a/docs/java-rest/high-level/document/delete-by-query.asciidoc
+++ b/docs/java-rest/high-level/document/delete-by-query.asciidoc
@@ -24,14 +24,13 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-con
--------------------------------------------------
<1> Set `proceed` on version conflict
-You can limit the documents by adding a type to the source or by adding a query.
+You can limit the documents by adding a query.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-typeOrQuery]
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-query]
--------------------------------------------------
-<1> Only copy `doc` type
-<2> Only copy documents which have field `user` set to `kimchy`
+<1> Only delete documents which have field `user` set to `kimchy`
It’s also possible to limit the number of processed documents by setting size.
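Putting these pieces together, here is a minimal, self-contained sketch of such a delete-by-query call; it assumes an existing `RestHighLevelClient` named `client` and a placeholder index `source1`, neither of which is part of this change:
["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;

class DeleteByQueryExample {
    static long deleteKimchyDocs(RestHighLevelClient client) throws IOException {
        DeleteByQueryRequest request = new DeleteByQueryRequest("source1"); // only the index, no type
        request.setConflicts("proceed");                          // keep going on version conflicts
        request.setQuery(new TermQueryBuilder("user", "kimchy")); // only delete matching documents
        request.setSize(10);                                      // limit the number of processed documents
        BulkByScrollResponse response = client.deleteByQuery(request, RequestOptions.DEFAULT);
        return response.getDeleted();
    }
}
--------------------------------------------------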
diff --git a/docs/java-rest/high-level/document/update-by-query.asciidoc b/docs/java-rest/high-level/document/update-by-query.asciidoc
index 324385a442b..5c7e4f5d3b0 100644
--- a/docs/java-rest/high-level/document/update-by-query.asciidoc
+++ b/docs/java-rest/high-level/document/update-by-query.asciidoc
@@ -25,14 +25,13 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-con
--------------------------------------------------
<1> Set `proceed` on version conflict
-You can limit the documents by adding a type to the source or by adding a query.
+You can limit the documents by adding a query.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-typeOrQuery]
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-query]
--------------------------------------------------
-<1> Only copy `doc` type
-<2> Only copy documents which have field `user` set to `kimchy`
+<1> Only update documents which have field `user` set to `kimchy`
It’s also possible to limit the number of processed documents by setting size.
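Likewise, a hedged sketch of an update-by-query that combines the query above with an inline Painless script; the index name and the `count` parameter are placeholders, and an existing `RestHighLevelClient` named `client` is assumed:
["source","java"]
--------------------------------------------------
import static java.util.Collections.singletonMap;

import java.io.IOException;

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

class UpdateByQueryExample {
    static long incrementFieldForKimchy(RestHighLevelClient client) throws IOException {
        UpdateByQueryRequest request = new UpdateByQueryRequest("source1");
        request.setConflicts("proceed");                          // keep going on version conflicts
        request.setQuery(new TermQueryBuilder("user", "kimchy")); // only update matching documents
        // Inline Painless script; the REST body uses the "source" key for the script text.
        request.setScript(new Script(ScriptType.INLINE, "painless",
            "ctx._source.field += params.count", singletonMap("count", 4)));
        BulkByScrollResponse response = client.updateByQuery(request, RequestOptions.DEFAULT);
        return response.getUpdated();
    }
}
--------------------------------------------------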
diff --git a/docs/java-rest/high-level/search/multi-search.asciidoc b/docs/java-rest/high-level/search/multi-search.asciidoc
index 5d5910be300..279b1c7fa39 100644
--- a/docs/java-rest/high-level/search/multi-search.asciidoc
+++ b/docs/java-rest/high-level/search/multi-search.asciidoc
@@ -28,10 +28,9 @@ For example:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices-types]
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices]
--------------------------------------------------
<1> Restricts the request to an index
-<2> Limits the request to a type
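For example, a rough sketch (index and field names are placeholders, and an existing `RestHighLevelClient` named `client` is assumed) of bundling two index-restricted search requests into one multi-search call:
["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

class MultiSearchExample {
    static MultiSearchResponse searchPostsAndUsers(RestHighLevelClient client) throws IOException {
        MultiSearchRequest request = new MultiSearchRequest();
        // Each sub-request is restricted to an index only; no type is set anymore.
        request.add(new SearchRequest("posts")
            .source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery())));
        request.add(new SearchRequest("users")
            .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("name", "kimchy"))));
        return client.msearch(request, RequestOptions.DEFAULT);
    }
}
--------------------------------------------------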
[[java-rest-high-multi-search-sync]]
==== Synchronous Execution
diff --git a/docs/java-rest/high-level/search/search.asciidoc b/docs/java-rest/high-level/search/search.asciidoc
index e2bcfda79e6..81e680b9250 100644
--- a/docs/java-rest/high-level/search/search.asciidoc
+++ b/docs/java-rest/high-level/search/search.asciidoc
@@ -33,10 +33,9 @@ Let's first look at some of the optional arguments of a +{request}+:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
-include-tagged::{doc-tests-file}[{api}-request-indices-types]
+include-tagged::{doc-tests-file}[{api}-request-indices]
--------------------------------------------------
<1> Restricts the request to an index
-<2> Limits the request to a type
There are a couple of other interesting optional parameters:
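For illustration, a sketch of a search request that combines the index restriction with routing and a source; the index, routing value, and field names are placeholders, and an existing `RestHighLevelClient` named `client` is assumed:
["source","java"]
--------------------------------------------------
import java.io.IOException;

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

class SearchRequestExample {
    static SearchResponse searchPosts(RestHighLevelClient client) throws IOException {
        SearchRequest searchRequest = new SearchRequest("posts"); // restrict to the "posts" index
        searchRequest.routing("routing");                         // optional routing value
        searchRequest.source(new SearchSourceBuilder()
            .query(QueryBuilders.termQuery("user", "kimchy"))
            .size(10));
        return client.search(searchRequest, RequestOptions.DEFAULT);
    }
}
--------------------------------------------------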
diff --git a/docs/reference/upgrade/cluster_restart.asciidoc b/docs/reference/upgrade/cluster_restart.asciidoc
index 06d5e96f8ef..85b6fffdb2e 100644
--- a/docs/reference/upgrade/cluster_restart.asciidoc
+++ b/docs/reference/upgrade/cluster_restart.asciidoc
@@ -38,6 +38,10 @@ include::shut-down-node.asciidoc[]
. *Upgrade all nodes.*
+
--
+include::remove-xpack.asciidoc[]
+--
++
+--
include::upgrade-node.asciidoc[]
include::set-paths-tip.asciidoc[]
--
@@ -47,8 +51,6 @@ include::set-paths-tip.asciidoc[]
Use the `elasticsearch-plugin` script to install the upgraded version of each
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
a node.
-+
-include::remove-xpack.asciidoc[]
. *Start each upgraded node.*
+
diff --git a/docs/reference/upgrade/remove-xpack.asciidoc b/docs/reference/upgrade/remove-xpack.asciidoc
index 9d4c4c9f779..eb13cec074b 100644
--- a/docs/reference/upgrade/remove-xpack.asciidoc
+++ b/docs/reference/upgrade/remove-xpack.asciidoc
@@ -1,4 +1,8 @@
-IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3,
-remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As
-of 6.3, {xpack} is included in the default distribution. The node will fail to
-start if the old {xpack} plugin is present.
+IMPORTANT: If you are upgrading from a version prior to 6.3 and use {xpack},
+you must remove the {xpack} plugin before upgrading with
+`bin/elasticsearch-plugin remove x-pack`. As of 6.3, {xpack} is included in
+the default distribution, so make sure to upgrade to that distribution. If you
+upgrade without removing the {xpack} plugin first, the node will fail to
+start. If that happens, you must downgrade to your previous version, remove
+{xpack}, and then upgrade again. In general downgrading is not supported, but
+in this particular case it is.
diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc
index e2edb6b2922..fc136f25499 100644
--- a/docs/reference/upgrade/rolling_upgrade.asciidoc
+++ b/docs/reference/upgrade/rolling_upgrade.asciidoc
@@ -44,6 +44,10 @@ include::shut-down-node.asciidoc[]
. *Upgrade the node you shut down.*
+
--
+include::remove-xpack.asciidoc[]
+--
++
+--
include::upgrade-node.asciidoc[]
include::set-paths-tip.asciidoc[]
--
@@ -53,8 +57,6 @@ include::set-paths-tip.asciidoc[]
Use the `elasticsearch-plugin` script to install the upgraded version of each
installed Elasticsearch plugin. All plugins must be upgraded when you upgrade
a node.
-+
-include::remove-xpack.asciidoc[]
. *Start the upgraded node.*
+
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionNumberSortScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionNumberSortScript.java
new file mode 100644
index 00000000000..4a7bbc6182e
--- /dev/null
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionNumberSortScript.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.expression;
+
+import java.io.IOException;
+import org.apache.lucene.expressions.Bindings;
+import org.apache.lucene.expressions.Expression;
+import org.apache.lucene.expressions.SimpleBindings;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DoubleValues;
+import org.apache.lucene.search.DoubleValuesSource;
+import org.elasticsearch.script.GeneralScriptException;
+import org.elasticsearch.script.NumberSortScript;
+
+/**
+ * A bridge to evaluate an {@link Expression} against {@link Bindings} in the context
+ * of a {@link NumberSortScript}.
+ */
+class ExpressionNumberSortScript implements NumberSortScript.LeafFactory {
+
+ final Expression exprScript;
+ final SimpleBindings bindings;
+ final DoubleValuesSource source;
+ final boolean needsScores;
+
+ ExpressionNumberSortScript(Expression e, SimpleBindings b, boolean needsScores) {
+ exprScript = e;
+ bindings = b;
+ source = exprScript.getDoubleValuesSource(bindings);
+ this.needsScores = needsScores;
+ }
+
+ @Override
+ public NumberSortScript newInstance(final LeafReaderContext leaf) throws IOException {
+ return new NumberSortScript() {
+ // Fake the scorer until setScorer is called.
+ DoubleValues values = source.getValues(leaf, new DoubleValues() {
+ @Override
+ public double doubleValue() {
+ return 0.0D;
+ }
+
+ @Override
+ public boolean advanceExact(int doc) {
+ return true;
+ }
+ });
+
+ @Override
+ public double execute() {
+ try {
+ return values.doubleValue();
+ } catch (Exception exception) {
+ throw new GeneralScriptException("Error evaluating " + exprScript, exception);
+ }
+ }
+
+ @Override
+ public void setDocument(int d) {
+ try {
+ values.advanceExact(d);
+ } catch (IOException e) {
+ throw new IllegalStateException("Can't advance to doc using " + exprScript, e);
+ }
+ }
+ };
+ }
+
+ @Override
+ public boolean needs_score() {
+ return needsScores;
+ }
+
+}
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
index 14d701fad8b..5d47c87a445 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
@@ -42,6 +42,7 @@ import org.elasticsearch.script.BucketAggregationScript;
import org.elasticsearch.script.BucketAggregationSelectorScript;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.FilterScript;
+import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
@@ -135,6 +136,9 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
} else if (context.instanceClazz.equals(AggregationScript.class)) {
AggregationScript.Factory factory = (p, lookup) -> newAggregationScript(expr, lookup, p);
return context.factoryClazz.cast(factory);
+ } else if (context.instanceClazz.equals(NumberSortScript.class)) {
+ NumberSortScript.Factory factory = (p, lookup) -> newSortScript(expr, lookup, p);
+ return context.factoryClazz.cast(factory);
}
throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]");
}
@@ -187,7 +191,6 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
// noop: _value is special for aggregations, and is handled in ExpressionScriptBindings
// TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a
// way to know this is for aggregations and so _value is ok to have...
-
} else if (vars != null && vars.containsKey(variable)) {
bindFromParams(vars, bindings, variable);
} else {
@@ -205,6 +208,33 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
return new ExpressionSearchScript(expr, bindings, specialValue, needsScores);
}
+ private NumberSortScript.LeafFactory newSortScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) {
+ // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
+ // instead of complicating SimpleBindings (which should stay simple)
+ SimpleBindings bindings = new SimpleBindings();
+ boolean needsScores = false;
+ for (String variable : expr.variables) {
+ try {
+ if (variable.equals("_score")) {
+ bindings.add(new SortField("_score", SortField.Type.SCORE));
+ needsScores = true;
+ } else if (vars != null && vars.containsKey(variable)) {
+ bindFromParams(vars, bindings, variable);
+ } else {
+ // delegate valuesource creation based on field's type
+ // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods.
+ final ValueSource valueSource = getDocValueSource(variable, lookup);
+ needsScores |= valueSource.getSortField(false).needsScores();
+ bindings.add(variable, valueSource.asDoubleValuesSource());
+ }
+ } catch (Exception e) {
+ // we defer "binding" of variables until here: give context for that variable
+ throw convertToScriptException("link error", expr.sourceText, variable, e);
+ }
+ }
+ return new ExpressionNumberSortScript(expr, bindings, needsScores);
+ }
+
private TermsSetQueryScript.LeafFactory newTermsSetQueryScript(Expression expr, SearchLookup lookup,
@Nullable Map<String, Object> vars) {
// NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java
new file mode 100644
index 00000000000..301fd2d4db7
--- /dev/null
+++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.expression;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.Collections;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
+import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
+import org.elasticsearch.script.NumberSortScript;
+import org.elasticsearch.script.ScriptException;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class ExpressionNumberSortScriptTests extends ESTestCase {
+ private ExpressionScriptEngine service;
+ private SearchLookup lookup;
+
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ NumberFieldType fieldType = new NumberFieldType(NumberType.DOUBLE);
+ MapperService mapperService = mock(MapperService.class);
+ when(mapperService.fullName("field")).thenReturn(fieldType);
+ when(mapperService.fullName("alias")).thenReturn(fieldType);
+
+ SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class);
+ when(doubleValues.advanceExact(anyInt())).thenReturn(true);
+ when(doubleValues.nextValue()).thenReturn(2.718);
+
+ AtomicNumericFieldData atomicFieldData = mock(AtomicNumericFieldData.class);
+ when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues);
+
+ IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class);
+ when(fieldData.getFieldName()).thenReturn("field");
+ when(fieldData.load(anyObject())).thenReturn(atomicFieldData);
+
+ service = new ExpressionScriptEngine(Settings.EMPTY);
+ lookup = new SearchLookup(mapperService, ignored -> fieldData, null);
+ }
+
+ private NumberSortScript.LeafFactory compile(String expression) {
+ NumberSortScript.Factory factory =
+ service.compile(null, expression, NumberSortScript.CONTEXT, Collections.emptyMap());
+ return factory.newFactory(Collections.emptyMap(), lookup);
+ }
+
+ public void testCompileError() {
+ ScriptException e = expectThrows(ScriptException.class, () -> {
+ compile("doc['field'].value * *@#)(@$*@#$ + 4");
+ });
+ assertTrue(e.getCause() instanceof ParseException);
+ }
+
+ public void testLinkError() {
+ ScriptException e = expectThrows(ScriptException.class, () -> {
+ compile("doc['nonexistent'].value * 5");
+ });
+ assertTrue(e.getCause() instanceof ParseException);
+ }
+
+ public void testFieldAccess() throws IOException {
+ NumberSortScript script = compile("doc['field'].value").newInstance(null);
+ script.setDocument(1);
+
+ double result = script.execute();
+ assertEquals(2.718, result, 0.0);
+ }
+
+ public void testFieldAccessWithFieldAlias() throws IOException {
+ NumberSortScript script = compile("doc['alias'].value").newInstance(null);
+ script.setDocument(1);
+
+ double result = script.execute();
+ assertEquals(2.718, result, 0.0);
+ }
+}
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java
index 86f2af32d16..eeb636d6697 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java
@@ -23,8 +23,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.painless.spi.Whitelist;
+import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScriptContext;
-import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -43,25 +43,25 @@ public class NeedsScoreTests extends ESSingleNodeTestCase {
IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double");
Map<ScriptContext<?>, List<Whitelist>> contexts = new HashMap<>();
- contexts.put(SearchScript.CONTEXT, Whitelist.BASE_WHITELISTS);
+ contexts.put(NumberSortScript.CONTEXT, Whitelist.BASE_WHITELISTS);
PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY, contexts);
QueryShardContext shardContext = index.newQueryShardContext(0, null, () -> 0, null);
SearchLookup lookup = new SearchLookup(index.mapperService(), shardContext::getForField, null);
- SearchScript.Factory factory = service.compile(null, "1.2", SearchScript.CONTEXT, Collections.emptyMap());
- SearchScript.LeafFactory ss = factory.newFactory(Collections.emptyMap(), lookup);
+ NumberSortScript.Factory factory = service.compile(null, "1.2", NumberSortScript.CONTEXT, Collections.emptyMap());
+ NumberSortScript.LeafFactory ss = factory.newFactory(Collections.emptyMap(), lookup);
assertFalse(ss.needs_score());
- factory = service.compile(null, "doc['d'].value", SearchScript.CONTEXT, Collections.emptyMap());
+ factory = service.compile(null, "doc['d'].value", NumberSortScript.CONTEXT, Collections.emptyMap());
ss = factory.newFactory(Collections.emptyMap(), lookup);
assertFalse(ss.needs_score());
- factory = service.compile(null, "1/_score", SearchScript.CONTEXT, Collections.emptyMap());
+ factory = service.compile(null, "1/_score", NumberSortScript.CONTEXT, Collections.emptyMap());
ss = factory.newFactory(Collections.emptyMap(), lookup);
assertTrue(ss.needs_score());
- factory = service.compile(null, "doc['d'].value * _score", SearchScript.CONTEXT, Collections.emptyMap());
+ factory = service.compile(null, "doc['d'].value * _score", NumberSortScript.CONTEXT, Collections.emptyMap());
ss = factory.newFactory(Collections.emptyMap(), lookup);
assertTrue(ss.needs_score());
}
diff --git a/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java
new file mode 100644
index 00000000000..1d8de9f95f4
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.script;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Scorable;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.lucene.ScorerAware;
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.search.lookup.LeafSearchLookup;
+import org.elasticsearch.search.lookup.SearchLookup;
+
+abstract class AbstractSortScript implements ScorerAware {
+
+ private static final Map<String, String> DEPRECATIONS;
+
+ static {
+ Map<String, String> deprecations = new HashMap<>();
+ deprecations.put(
+ "doc",
+ "Accessing variable [doc] via [params.doc] from within a sort-script " +
+ "is deprecated in favor of directly accessing [doc]."
+ );
+ deprecations.put(
+ "_doc",
+ "Accessing variable [doc] via [params._doc] from within a sort-script " +
+ "is deprecated in favor of directly accessing [doc]."
+ );
+ DEPRECATIONS = Collections.unmodifiableMap(deprecations);
+ }
+
+ /**
+ * The generic runtime parameters for the script.
+ */
+ private final Map<String, Object> params;
+
+ /** A scorer that will return the score for the current document when the script is run. */
+ private Scorable scorer;
+
+ /**
+ * A leaf lookup for the bound segment this script will operate on.
+ */
+ private final LeafSearchLookup leafLookup;
+
+ AbstractSortScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
+ this.leafLookup = lookup.getLeafSearchLookup(leafContext);
+ Map<String, Object> parameters = new HashMap<>(params);
+ parameters.putAll(leafLookup.asMap());
+ this.params = new ParameterMap(parameters, DEPRECATIONS);
+ }
+
+ protected AbstractSortScript() {
+ this.params = null;
+ this.leafLookup = null;
+ }
+
+ /**
+ * Return the parameters for this script.
+ */
+ public Map<String, Object> getParams() {
+ return params;
+ }
+
+ @Override
+ public void setScorer(Scorable scorer) {
+ this.scorer = scorer;
+ }
+
+ /** Return the score of the current document. */
+ public double get_score() {
+ try {
+ return scorer.score();
+ } catch (IOException e) {
+ throw new ElasticsearchException("couldn't lookup score", e);
+ }
+ }
+
+ /**
+ * The doc lookup for the Lucene segment this script was created for.
+ */
+ public Map<String, ScriptDocValues<?>> getDoc() {
+ return leafLookup.doc();
+ }
+
+ /**
+ * Set the current document to run the script on next.
+ */
+ public void setDocument(int docid) {
+ leafLookup.setDocument(docid);
+ }
+}
diff --git a/server/src/main/java/org/elasticsearch/script/NumberSortScript.java b/server/src/main/java/org/elasticsearch/script/NumberSortScript.java
new file mode 100644
index 00000000000..d0b3fdbed36
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/NumberSortScript.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.script;
+
+import java.io.IOException;
+import java.util.Map;
+import org.apache.lucene.index.LeafReaderContext;
+import org.elasticsearch.search.lookup.SearchLookup;
+
+public abstract class NumberSortScript extends AbstractSortScript {
+
+ public static final String[] PARAMETERS = {};
+
+ public static final ScriptContext CONTEXT = new ScriptContext<>("number_sort", Factory.class);
+
+ public NumberSortScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
+ super(params, lookup, leafContext);
+ }
+
+ protected NumberSortScript() {
+ super();
+ }
+
+ public abstract double execute();
+
+ /**
+ * A factory to construct {@link NumberSortScript} instances.
+ */
+ public interface LeafFactory {
+ NumberSortScript newInstance(LeafReaderContext ctx) throws IOException;
+
+ /**
+ * Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise.
+ */
+ boolean needs_score();
+ }
+
+ /**
+ * A factory to construct stateful {@link NumberSortScript} factories for a specific index.
+ */
+ public interface Factory {
+ LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
+ }
+}
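For context, a minimal sketch (not part of this patch) of the factory chain this context defines: a hypothetical script engine could hand back a factory whose scripts sort every document by a constant taken from the script params:
["source","java"]
--------------------------------------------------
import java.io.IOException;
import java.util.Map;

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.search.lookup.SearchLookup;

// Factory -> LeafFactory -> NumberSortScript, as declared by NumberSortScript above.
class ConstantNumberSortFactory implements NumberSortScript.Factory {
    @Override
    public NumberSortScript.LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup) {
        final double constant = ((Number) params.getOrDefault("value", 1.0d)).doubleValue();
        return new NumberSortScript.LeafFactory() {
            @Override
            public NumberSortScript newInstance(LeafReaderContext ctx) throws IOException {
                return new NumberSortScript(params, lookup, ctx) {
                    @Override
                    public double execute() {
                        return constant; // every document gets the same sort value
                    }
                };
            }

            @Override
            public boolean needs_score() {
                return false; // the constant never reads _score
            }
        };
    }
}
--------------------------------------------------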
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
index ddc090251a3..24dd491e3a1 100644
--- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -43,7 +43,8 @@ public class ScriptModule {
SearchScript.CONTEXT,
AggregationScript.CONTEXT,
ScoreScript.CONTEXT,
- SearchScript.SCRIPT_SORT_CONTEXT,
+ NumberSortScript.CONTEXT,
+ StringSortScript.CONTEXT,
TermsSetQueryScript.CONTEXT,
UpdateScript.CONTEXT,
BucketAggregationScript.CONTEXT,
diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java
index 496af2fbdd5..2fd439564a6 100644
--- a/server/src/main/java/org/elasticsearch/script/SearchScript.java
+++ b/server/src/main/java/org/elasticsearch/script/SearchScript.java
@@ -139,7 +139,4 @@ public abstract class SearchScript implements ScorerAware {
/** The context used to compile {@link SearchScript} factories. */
public static final ScriptContext CONTEXT = new ScriptContext<>("search", Factory.class);
- // TODO: remove these contexts when it has its own interface
- // Can return a double. (For ScriptSortType#NUMBER only, for ScriptSortType#STRING normal CONTEXT should be used)
- public static final ScriptContext<Factory> SCRIPT_SORT_CONTEXT = new ScriptContext<>("sort", Factory.class);
}
diff --git a/server/src/main/java/org/elasticsearch/script/StringSortScript.java b/server/src/main/java/org/elasticsearch/script/StringSortScript.java
new file mode 100644
index 00000000000..1c6c47dd215
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/StringSortScript.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.script;
+
+import java.io.IOException;
+import java.util.Map;
+import org.apache.lucene.index.LeafReaderContext;
+import org.elasticsearch.search.lookup.SearchLookup;
+
+public abstract class StringSortScript extends AbstractSortScript {
+
+ public static final String[] PARAMETERS = {};
+
+ public static final ScriptContext CONTEXT = new ScriptContext<>("string_sort", Factory.class);
+
+ public StringSortScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
+ super(params, lookup, leafContext);
+ }
+
+ public abstract String execute();
+
+ /**
+ * A factory to construct {@link StringSortScript} instances.
+ */
+ public interface LeafFactory {
+ StringSortScript newInstance(LeafReaderContext ctx) throws IOException;
+ }
+
+ /**
+ * A factory to construct stateful {@link StringSortScript} factories for a specific index.
+ */
+ public interface Factory {
+ LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup);
+ }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
index e425755a55e..95478e08324 100644
--- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java
@@ -32,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -51,7 +50,8 @@ import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.Script;
-import org.elasticsearch.script.SearchScript;
+import org.elasticsearch.script.NumberSortScript;
+import org.elasticsearch.script.StringSortScript;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
@@ -305,9 +305,6 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
- final SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.SCRIPT_SORT_CONTEXT);
- final SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());
-
MultiValueMode valueMode = null;
if (sortMode != null) {
valueMode = MultiValueMode.fromString(sortMode.toString());
@@ -336,8 +333,10 @@ public class ScriptSortBuilder extends SortBuilder {
final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
switch (type) {
case STRING:
+ final StringSortScript.Factory factory = context.getScriptService().compile(script, StringSortScript.CONTEXT);
+ final StringSortScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());
fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
- SearchScript leafScript;
+ StringSortScript leafScript;
@Override
protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException {
leafScript = searchScript.newInstance(context);
@@ -350,9 +349,7 @@ public class ScriptSortBuilder extends SortBuilder {
}
@Override
public BytesRef binaryValue() {
- final Object run = leafScript.run();
- CollectionUtils.ensureNoSelfReferences(run, "ScriptSortBuilder leaf script");
- spare.copyChars(run.toString());
+ spare.copyChars(leafScript.execute());
return spare.get();
}
};
@@ -365,11 +362,13 @@ public class ScriptSortBuilder extends SortBuilder {
};
break;
case NUMBER:
+ final NumberSortScript.Factory numberSortFactory = context.getScriptService().compile(script, NumberSortScript.CONTEXT);
+ final NumberSortScript.LeafFactory numberSortScript = numberSortFactory.newFactory(script.getParams(), context.lookup());
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
- SearchScript leafScript;
+ NumberSortScript leafScript;
@Override
protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException {
- leafScript = searchScript.newInstance(context);
+ leafScript = numberSortScript.newInstance(context);
final NumericDoubleValues values = new NumericDoubleValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
@@ -378,7 +377,7 @@ public class ScriptSortBuilder extends SortBuilder {
}
@Override
public double doubleValue() {
- return leafScript.runAsDouble();
+ return leafScript.execute();
}
};
return FieldData.singleton(values);
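For illustration, a hedged sketch of the caller side: a script sort of type `NUMBER` is now compiled against `NumberSortScript.CONTEXT` and a `STRING` sort against `StringSortScript.CONTEXT`; the Painless source and field name below are placeholders:
["source","java"]
--------------------------------------------------
import static java.util.Collections.emptyMap;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;

class ScriptSortExample {
    static SearchSourceBuilder sortedByScript() {
        Script script = new Script(ScriptType.INLINE, "painless", "doc['field'].value * 2", emptyMap());
        // A NUMBER script sort expects a double from execute(); a STRING sort expects a String.
        return new SearchSourceBuilder()
            .sort(SortBuilders.scriptSort(script, ScriptSortType.NUMBER).order(SortOrder.DESC));
    }
}
--------------------------------------------------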
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
index 7823cd8849c..c75d8bb054f 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
@@ -274,6 +274,7 @@ public class RolloverIT extends ESIntegTestCase {
public void testRolloverWithDateMath() {
ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear());
String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
String dateMathExp = "";
assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
diff --git a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java
index 0ba83d649a7..0bb72a4050d 100644
--- a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java
+++ b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java
@@ -19,6 +19,7 @@
package org.elasticsearch.discovery;
+import java.nio.file.Path;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
@@ -33,7 +34,8 @@ import org.elasticsearch.discovery.zen.ZenPing;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration;
+import org.elasticsearch.test.InternalTestCluster;
+import org.elasticsearch.test.NodeConfigurationSource;
import org.elasticsearch.test.discovery.TestZenDiscovery;
import org.elasticsearch.test.disruption.NetworkDisruption;
import org.elasticsearch.test.disruption.NetworkDisruption.Bridge;
@@ -52,9 +54,9 @@ import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
+import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@@ -62,7 +64,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places.
- private ClusterDiscoveryConfiguration discoveryConfig;
+ private NodeConfigurationSource discoveryConfig;
@Override
protected Settings nodeSettings(int nodeOrdinal) {
@@ -116,18 +118,14 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
}
}
- List<String> startCluster(int numberOfNodes) throws ExecutionException, InterruptedException {
+ List<String> startCluster(int numberOfNodes) {
return startCluster(numberOfNodes, -1);
}
- List<String> startCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException {
- return startCluster(numberOfNodes, minimumMasterNode, null);
- }
-
- List<String> startCluster(int numberOfNodes, int minimumMasterNode, @Nullable int[] unicastHostsOrdinals) throws
- ExecutionException, InterruptedException {
- configureCluster(numberOfNodes, unicastHostsOrdinals, minimumMasterNode);
- List<String> nodes = internalCluster().startNodes(numberOfNodes);
+ List<String> startCluster(int numberOfNodes, int minimumMasterNode) {
+ configureCluster(numberOfNodes, minimumMasterNode);
+ InternalTestCluster internalCluster = internalCluster();
+ List<String> nodes = internalCluster.startNodes(numberOfNodes);
ensureStableCluster(numberOfNodes);
// TODO: this is a temporary solution so that nodes will not base their reaction to a partition based on previous successful results
@@ -154,20 +152,11 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
return Arrays.asList(MockTransportService.TestPlugin.class);
}
- void configureCluster(
- int numberOfNodes,
- @Nullable int[] unicastHostsOrdinals,
- int minimumMasterNode
- ) throws ExecutionException, InterruptedException {
- configureCluster(DEFAULT_SETTINGS, numberOfNodes, unicastHostsOrdinals, minimumMasterNode);
+ void configureCluster(int numberOfNodes, int minimumMasterNode) {
+ configureCluster(DEFAULT_SETTINGS, numberOfNodes, minimumMasterNode);
}
- void configureCluster(
- Settings settings,
- int numberOfNodes,
- @Nullable int[] unicastHostsOrdinals,
- int minimumMasterNode
- ) throws ExecutionException, InterruptedException {
+ void configureCluster(Settings settings, int numberOfNodes, int minimumMasterNode) {
if (minimumMasterNode < 0) {
minimumMasterNode = numberOfNodes / 2 + 1;
}
@@ -177,14 +166,21 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
.put(settings)
.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numberOfNodes)
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode)
+ .putList(DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file")
.build();
if (discoveryConfig == null) {
- if (unicastHostsOrdinals == null) {
- discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings);
- } else {
- discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings, unicastHostsOrdinals);
- }
+ discoveryConfig = new NodeConfigurationSource() {
+ @Override
+ public Settings nodeSettings(final int nodeOrdinal) {
+ return nodeSettings;
+ }
+
+ @Override
+ public Path nodeConfigPath(final int nodeOrdinal) {
+ return null;
+ }
+ };
}
}
diff --git a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
index 3b08eb6870e..b35bf8444e9 100644
--- a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
+++ b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java
@@ -363,7 +363,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
*/
public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Exception {
// don't use DEFAULT settings (which can cause node disconnects on a slow CI machine)
- configureCluster(Settings.EMPTY, 3, null, 1);
+ configureCluster(Settings.EMPTY, 3, 1);
internalCluster().startMasterOnlyNode();
final String node_1 = internalCluster().startDataOnlyNode();
@@ -390,7 +390,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
public void testIndexImportedFromDataOnlyNodesIfMasterLostDataFolder() throws Exception {
// test for https://github.com/elastic/elasticsearch/issues/8823
- configureCluster(2, null, 1);
+ configureCluster(2, 1);
String masterNode = internalCluster().startMasterOnlyNode(Settings.EMPTY);
internalCluster().startDataOnlyNode(Settings.EMPTY);
@@ -421,7 +421,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase {
.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed
.build();
final String idxName = "test";
- configureCluster(settings, 3, null, 2);
+ configureCluster(settings, 3, 2);
final List<String> allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(2);
final String dataNode = internalCluster().startDataOnlyNode();
ensureStableCluster(3);
diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java
index 4d190921f2d..2c7f17468ac 100644
--- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java
+++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java
@@ -59,7 +59,8 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
public void testIsolatedUnicastNodes() throws Exception {
- List<String> nodes = startCluster(4, -1, new int[]{0});
+ internalCluster().setHostsListContainsOnlyFirstNode(true);
+ List<String> nodes = startCluster(4, -1);
// Figure out what is the elected master node
final String unicastTarget = nodes.get(0);
@@ -98,7 +99,8 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
* The rejoining node should take this master node and connect.
*/
public void testUnicastSinglePingResponseContainsMaster() throws Exception {
- List<String> nodes = startCluster(4, -1, new int[]{0});
+ internalCluster().setHostsListContainsOnlyFirstNode(true);
+ List<String> nodes = startCluster(4, -1);
// Figure out what is the elected master node
final String masterNode = internalCluster().getMasterName();
logger.info("---> legit elected master node={}", masterNode);
@@ -194,7 +196,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
}
public void testClusterFormingWithASlowNode() throws Exception {
- configureCluster(3, null, 2);
+ configureCluster(3, 2);
SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(random(), 0, 0, 1000, 2000);
@@ -210,7 +212,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase {
}
public void testElectMasterWithLatestVersion() throws Exception {
- configureCluster(3, null, 2);
+ configureCluster(3, 2);
final Set<String> nodes = new HashSet<>(internalCluster().startNodes(3));
ensureStableCluster(3);
ServiceDisruptionScheme isolateAllNodes =
diff --git a/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java
index d8390a6c688..474848a834e 100644
--- a/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java
+++ b/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java
@@ -60,7 +60,7 @@ public class SnapshotDisruptionIT extends AbstractDisruptionTestCase {
.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed
.build();
final String idxName = "test";
- configureCluster(settings, 4, null, 2);
+ configureCluster(settings, 4, 2);
final List<String> allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(3);
final String dataNode = internalCluster().startDataOnlyNode();
ensureStableCluster(4);
diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
index 9e89bf2b59d..64a9d97e3f4 100644
--- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
+++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
@@ -105,6 +105,47 @@ public class MockScriptEngine implements ScriptEngine {
}
};
return context.factoryClazz.cast(factory);
+ } else if (context.instanceClazz.equals(NumberSortScript.class)) {
+ NumberSortScript.Factory factory = (parameters, lookup) -> new NumberSortScript.LeafFactory() {
+ @Override
+ public NumberSortScript newInstance(final LeafReaderContext ctx) {
+ return new NumberSortScript(parameters, lookup, ctx) {
+ @Override
+ public double execute() {
+ Map<String, Object> vars = new HashMap<>(parameters);
+ vars.put("params", parameters);
+ vars.put("doc", getDoc());
+ return ((Number) script.apply(vars)).doubleValue();
+ }
+ };
+ }
+
+ @Override
+ public boolean needs_score() {
+ return false;
+ }
+ };
+ return context.factoryClazz.cast(factory);
+ } else if (context.instanceClazz.equals(StringSortScript.class)) {
+ StringSortScript.Factory factory = (parameters, lookup) -> (StringSortScript.LeafFactory) ctx
+ -> new StringSortScript(parameters, lookup, ctx) {
+ @Override
+ public String execute() {
+ Map<String, Object> vars = new HashMap<>(parameters);
+ vars.put("params", parameters);
+ vars.put("doc", getDoc());
+ return String.valueOf(script.apply(vars));
+ }
+ };
+ return context.factoryClazz.cast(factory);
+ } else if (context.instanceClazz.equals(IngestScript.class)) {
+ IngestScript.Factory factory = vars -> new IngestScript(vars) {
+ @Override
+ public void execute(Map<String, Object> ctx) {
+ script.apply(ctx);
+ }
+ };
+ return context.factoryClazz.cast(factory);
} else if(context.instanceClazz.equals(AggregationScript.class)) {
AggregationScript.Factory factory = (parameters, lookup) -> new AggregationScript.LeafFactory() {
@Override
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index 5d6a1ebe6ed..6d1b80e0364 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -232,6 +232,9 @@ public final class InternalTestCluster extends TestCluster {
private ServiceDisruptionScheme activeDisruptionScheme;
private Function<Client, Client> clientWrapper;
+ // If set to true only the first node in the cluster will be made a unicast node
+ private boolean hostsListContainsOnlyFirstNode;
+
public InternalTestCluster(
final long clusterSeed,
final Path baseDir,
@@ -1613,12 +1616,17 @@ public final class InternalTestCluster extends TestCluster {
private final Object discoveryFileMutex = new Object();
- private void rebuildUnicastHostFiles(Collection<NodeAndClient> newNodes) {
+ private void rebuildUnicastHostFiles(List<NodeAndClient> newNodes) {
// cannot be a synchronized method since it's called on other threads from within synchronized startAndPublishNodesAndClients()
synchronized (discoveryFileMutex) {
try {
- List<String> discoveryFileContents = Stream.concat(nodes.values().stream(), newNodes.stream())
- .map(nac -> nac.node.injector().getInstance(TransportService.class)).filter(Objects::nonNull)
+ Stream<NodeAndClient> unicastHosts = Stream.concat(nodes.values().stream(), newNodes.stream());
+ if (hostsListContainsOnlyFirstNode) {
+ unicastHosts = unicastHosts.limit(1L);
+ }
+ List<String> discoveryFileContents = unicastHosts.map(
+ nac -> nac.node.injector().getInstance(TransportService.class)
+ ).filter(Objects::nonNull)
.map(TransportService::getLocalNode).filter(Objects::nonNull).filter(DiscoveryNode::isMasterNode)
.map(n -> n.getAddress().toString())
.distinct().collect(Collectors.toList());
@@ -2050,6 +2058,9 @@ public final class InternalTestCluster extends TestCluster {
return filterNodes(nodes, NodeAndClient::isMasterEligible).size();
}
+ public void setHostsListContainsOnlyFirstNode(boolean hostsListContainsOnlyFirstNode) {
+ this.hostsListContainsOnlyFirstNode = hostsListContainsOnlyFirstNode;
+ }
public void setDisruptionScheme(ServiceDisruptionScheme scheme) {
assert activeDisruptionScheme == null :
diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
deleted file mode 100644
index a63ba22bb51..00000000000
--- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.test.discovery;
-
-import com.carrotsearch.randomizedtesting.RandomizedTest;
-import com.carrotsearch.randomizedtesting.SysGlobals;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.SuppressForbidden;
-import org.elasticsearch.common.network.NetworkUtils;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.CollectionUtils;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.mocksocket.MockServerSocket;
-import org.elasticsearch.test.NodeConfigurationSource;
-import org.elasticsearch.transport.TcpTransport;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.nio.file.Path;
-import java.util.HashSet;
-import java.util.Set;
-
-public class ClusterDiscoveryConfiguration extends NodeConfigurationSource {
-
- /**
- * The number of ports in the range used for this JVM
- */
- private static final int PORTS_PER_JVM = 100;
-
- private static final int JVM_ORDINAL = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0"));
-
- /**
- * a per-JVM unique offset to be used for calculating unique port ranges.
- */
- private static final int JVM_BASE_PORT_OFFSET = PORTS_PER_JVM * (JVM_ORDINAL + 1);
-
-
- static Settings DEFAULT_NODE_SETTINGS = Settings.EMPTY;
- private static final String IP_ADDR = "127.0.0.1";
-
- final int numOfNodes;
- final Settings nodeSettings;
- final Settings transportClientSettings;
-
- public ClusterDiscoveryConfiguration(int numOfNodes, Settings extraSettings) {
- this.numOfNodes = numOfNodes;
- this.nodeSettings = Settings.builder().put(DEFAULT_NODE_SETTINGS).put(extraSettings).build();
- this.transportClientSettings = Settings.builder().put(extraSettings).build();
- }
-
- @Override
- public Settings nodeSettings(int nodeOrdinal) {
- return nodeSettings;
- }
-
- @Override
- public Path nodeConfigPath(int nodeOrdinal) {
- return null;
- }
-
- @Override
- public Settings transportClientSettings() {
- return transportClientSettings;
- }
-
- public static class UnicastZen extends ClusterDiscoveryConfiguration {
-
- // this variable is incremented on each bind attempt and will maintain the next port that should be tried
- private static int nextPort = calcBasePort();
-
- private final int[] unicastHostOrdinals;
- private final int[] unicastHostPorts;
-
- public UnicastZen(int numOfNodes, Settings extraSettings) {
- this(numOfNodes, numOfNodes, extraSettings);
- }
-
- public UnicastZen(int numOfNodes, int numOfUnicastHosts, Settings extraSettings) {
- super(numOfNodes, extraSettings);
- if (numOfUnicastHosts == numOfNodes) {
- unicastHostOrdinals = new int[numOfNodes];
- for (int i = 0; i < numOfNodes; i++) {
- unicastHostOrdinals[i] = i;
- }
- } else {
- Set<Integer> ordinals = new HashSet<>(numOfUnicastHosts);
- while (ordinals.size() != numOfUnicastHosts) {
- ordinals.add(RandomizedTest.randomInt(numOfNodes - 1));
- }
- unicastHostOrdinals = CollectionUtils.toArray(ordinals);
- }
- this.unicastHostPorts = unicastHostPorts(numOfNodes);
- assert unicastHostOrdinals.length <= unicastHostPorts.length;
- }
-
- public UnicastZen(int numOfNodes, int[] unicastHostOrdinals) {
- this(numOfNodes, Settings.EMPTY, unicastHostOrdinals);
- }
-
- public UnicastZen(int numOfNodes, Settings extraSettings, int[] unicastHostOrdinals) {
- super(numOfNodes, extraSettings);
- this.unicastHostOrdinals = unicastHostOrdinals;
- this.unicastHostPorts = unicastHostPorts(numOfNodes);
- assert unicastHostOrdinals.length <= unicastHostPorts.length;
- }
-
- private static int calcBasePort() {
- return 30000 + JVM_BASE_PORT_OFFSET;
- }
-
- @Override
- public Settings nodeSettings(int nodeOrdinal) {
- Settings.Builder builder = Settings.builder().put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numOfNodes);
-
- String[] unicastHosts = new String[unicastHostOrdinals.length];
- if (nodeOrdinal >= unicastHostPorts.length) {
- throw new ElasticsearchException("nodeOrdinal [" + nodeOrdinal + "] is greater than the number unicast ports ["
- + unicastHostPorts.length + "]");
- } else {
- // we need to pin the node port & host so we'd know where to point things
- builder.put(TcpTransport.PORT.getKey(), unicastHostPorts[nodeOrdinal]);
- builder.put(TcpTransport.HOST.getKey(), IP_ADDR); // only bind on one IF we use v4 here by default
- for (int i = 0; i < unicastHostOrdinals.length; i++) {
- unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]);
- }
- }
- builder.putList("discovery.zen.ping.unicast.hosts", unicastHosts);
- return builder.put(super.nodeSettings(nodeOrdinal)).build();
- }
-
- @SuppressForbidden(reason = "we know we pass a IP address")
- protected static synchronized int[] unicastHostPorts(int numHosts) {
- int[] unicastHostPorts = new int[numHosts];
-
- final int basePort = calcBasePort();
- final int maxPort = basePort + PORTS_PER_JVM;
- int tries = 0;
- for (int i = 0; i < unicastHostPorts.length; i++) {
- boolean foundPortInRange = false;
- while (tries < PORTS_PER_JVM && !foundPortInRange) {
- try (ServerSocket serverSocket = new MockServerSocket()) {
- // Set SO_REUSEADDR as we may bind here and not be able to reuse the address immediately without it.
- serverSocket.setReuseAddress(NetworkUtils.defaultReuseAddress());
- serverSocket.bind(new InetSocketAddress(IP_ADDR, nextPort));
- // bind was a success
- foundPortInRange = true;
- unicastHostPorts[i] = nextPort;
- } catch (IOException e) {
- // Do nothing
- }
-
- nextPort++;
- if (nextPort >= maxPort) {
- // Roll back to the beginning of the range and do not go into another JVM's port range
- nextPort = basePort;
- }
- tries++;
- }
-
- if (!foundPortInRange) {
- throw new ElasticsearchException("could not find enough open ports in range [" + basePort + "-" + maxPort
- + "]. required [" + unicastHostPorts.length + "] ports");
- }
- }
- return unicastHostPorts;
- }
- }
-}
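Aside on the deleted helper: UnicastZen pinned each test node to a port chosen from a per-JVM range by probing with a bound socket. The following is a minimal JDK-only sketch of that probing idea; the class name, the findFreePort method, and the 30000 base port are illustrative and are not part of the Elasticsearch test framework.

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.ServerSocket;

    // Illustrative sketch only: probe a per-JVM port range the way the removed
    // UnicastZen helper did, keeping the first port that binds successfully.
    public class PortRangeProbe {
        private static final int PORTS_PER_JVM = 100;

        static int findFreePort(int jvmOrdinal) throws IOException {
            final int basePort = 30000 + PORTS_PER_JVM * (jvmOrdinal + 1);
            for (int candidate = basePort; candidate < basePort + PORTS_PER_JVM; candidate++) {
                try (ServerSocket socket = new ServerSocket()) {
                    socket.setReuseAddress(true); // allow the test node to rebind the port right away
                    socket.bind(new InetSocketAddress("127.0.0.1", candidate));
                    return candidate;             // bind succeeded, so the port is currently free
                } catch (IOException e) {
                    // port already in use, try the next one in this JVM's range
                }
            }
            throw new IOException("no free port in range [" + basePort + "-" + (basePort + PORTS_PER_JVM) + "]");
        }

        public static void main(String[] args) throws IOException {
            System.out.println("picked port " + findFreePort(0));
        }
    }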
diff --git a/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java b/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java
index b8d0c915899..2ff638ebca0 100644
--- a/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java
+++ b/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java
@@ -110,22 +110,12 @@ public class ChainIT extends ESRestTestCase {
assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh")));
}
- private static void resumeFollow(String leaderIndex, String followIndex) throws IOException {
- final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow");
- request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}");
- assertOK(client().performRequest(request));
- }
-
private static void followIndex(String leaderIndex, String followIndex) throws IOException {
final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow");
request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}");
assertOK(client().performRequest(request));
}
- private static void pauseFollow(String followIndex) throws IOException {
- assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow")));
- }
-
private static void verifyDocuments(String index, int expectedNumDocs) throws IOException {
verifyDocuments(index, expectedNumDocs, client());
}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
index a784922228b..d0de0e02038 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.rollup;
import org.elasticsearch.common.ParseField;
+import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
@@ -15,7 +16,9 @@ import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilde
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import java.util.Arrays;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -34,8 +37,19 @@ public class RollupField {
public static final String TYPE_NAME = "_doc";
public static final String AGG = "agg";
public static final String ROLLUP_MISSING = "ROLLUP_MISSING_40710B25931745D4B0B8B310F6912A69";
- public static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
+ public static final List<String> SUPPORTED_NUMERIC_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
+ public static final List<String> SUPPORTED_DATE_METRICS = Arrays.asList(MaxAggregationBuilder.NAME,
+ MinAggregationBuilder.NAME,
+ ValueCountAggregationBuilder.NAME);
+
+ // a set of ALL our supported metrics, to be a union of all other supported metric types (numeric, date, etc.)
+ public static final Set<String> SUPPORTED_METRICS;
+ static {
+ SUPPORTED_METRICS = new HashSet<>();
+ SUPPORTED_METRICS.addAll(SUPPORTED_NUMERIC_METRICS);
+ SUPPORTED_METRICS.addAll(SUPPORTED_DATE_METRICS);
+ }
// these mapper types are used by the configs (metric, histo, etc) to validate field mappings
public static final List<String> NUMERIC_FIELD_MAPPER_TYPES;
@@ -47,6 +61,8 @@ public class RollupField {
NUMERIC_FIELD_MAPPER_TYPES = types;
}
+ public static final String DATE_FIELD_MAPPER_TYPE = DateFieldMapper.CONTENT_TYPE;
+
/**
* Format to the appropriate Rollup field name convention
*
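For context on the RollupField change above: SUPPORTED_METRICS becomes the union of the per-type lists, so callers can check a metric name once against the union and again against the type-specific list. Below is a standalone sketch of that pattern; the class and constant names are illustrative, and unlike the real class this sketch wraps the union in an unmodifiable view.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    // Illustrative only: combine per-type metric lists into a single lookup set,
    // mirroring the static initializer added to RollupField.
    class SupportedMetricsExample {
        static final List<String> NUMERIC_METRICS = Arrays.asList("max", "min", "sum", "avg", "value_count");
        static final List<String> DATE_METRICS = Arrays.asList("max", "min", "value_count");

        static final Set<String> ALL_METRICS;
        static {
            Set<String> union = new HashSet<>();
            union.addAll(NUMERIC_METRICS);
            union.addAll(DATE_METRICS);
            ALL_METRICS = Collections.unmodifiableSet(union); // duplicates such as "max" collapse into one entry
        }

        public static void main(String[] args) {
            System.out.println(ALL_METRICS.contains("avg"));                    // true: valid for numeric fields
            System.out.println(DATE_METRICS.containsAll(Arrays.asList("avg"))); // false: "avg" is rejected for date fields
        }
    }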
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
index 3a267e4cfa4..46f0c7397c6 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
@@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.rollup.RollupField;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -108,18 +109,24 @@ public class MetricConfig implements Writeable, ToXContentObject {
Map<String, FieldCapabilities> fieldCaps = fieldCapsResponse.get(field);
if (fieldCaps != null && fieldCaps.isEmpty() == false) {
fieldCaps.forEach((key, value) -> {
+ if (value.isAggregatable() == false) {
+ validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
+ "but is not.");
+ }
if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) {
- if (value.isAggregatable() == false) {
- validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
- "but is not.");
+ // nothing to do as all metrics are supported by SUPPORTED_NUMERIC_METRICS currently
+ } else if (RollupField.DATE_FIELD_MAPPER_TYPE.equals(key)) {
+ if (RollupField.SUPPORTED_DATE_METRICS.containsAll(metrics) == false) {
+ validationException.addValidationError(
+ buildSupportedMetricError("date", RollupField.SUPPORTED_DATE_METRICS));
}
} else {
- validationException.addValidationError("The field referenced by a metric group must be a [numeric] type, but found " +
- fieldCaps.keySet().toString() + " for field [" + field + "]");
+ validationException.addValidationError("The field referenced by a metric group must be a [numeric] or [date] type, " +
+ "but found " + fieldCaps.keySet().toString() + " for field [" + field + "]");
}
});
} else {
- validationException.addValidationError("Could not find a [numeric] field with name [" + field + "] in any of the " +
+ validationException.addValidationError("Could not find a [numeric] or [date] field with name [" + field + "] in any of the " +
"indices matching the index pattern.");
}
}
@@ -166,4 +173,11 @@ public class MetricConfig implements Writeable, ToXContentObject {
public static MetricConfig fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
+
+ private String buildSupportedMetricError(String type, List<String> supportedMetrics) {
+ List unsupportedMetrics = new ArrayList<>(metrics);
+ unsupportedMetrics.removeAll(supportedMetrics);
+ return "Only the metrics " + supportedMetrics + " are supported for [" + type + "] types," +
+ " but unsupported metrics " + unsupportedMetrics + " supplied for field [" + field + "]";
+ }
}
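The new buildSupportedMetricError helper derives the offending metrics by copying the requested list and removing everything that is supported. A self-contained sketch of that logic follows; describeUnsupported and the hard-coded lists are illustrative stand-ins, not the MetricConfig API.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Sketch of the error-message construction: whatever survives removeAll()
    // was not valid for the field type and is reported back to the user.
    class UnsupportedMetricsExample {
        static String describeUnsupported(String field, String type, List<String> requested, List<String> supported) {
            List<String> unsupported = new ArrayList<>(requested);
            unsupported.removeAll(supported); // keep only the metrics that are not supported for this type
            return "Only the metrics " + supported + " are supported for [" + type + "] types,"
                + " but unsupported metrics " + unsupported + " supplied for field [" + field + "]";
        }

        public static void main(String[] args) {
            System.out.println(describeUnsupported("my_field", "date",
                Arrays.asList("avg", "max"), Arrays.asList("max", "min", "value_count")));
        }
    }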
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java
index c26b66875e6..a63c3b0dc8c 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java
@@ -16,7 +16,6 @@ public class APMSystemUser extends User {
public static final String NAME = UsernamesField.APM_NAME;
public static final String ROLE_NAME = UsernamesField.APM_ROLE;
public static final Version DEFINED_SINCE = Version.V_6_5_0;
- public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE);
public APMSystemUser(boolean enabled) {
super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java
index dfa437fa8d2..3daf242d520 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java
@@ -16,7 +16,6 @@ public class BeatsSystemUser extends User {
public static final String NAME = UsernamesField.BEATS_NAME;
public static final String ROLE_NAME = UsernamesField.BEATS_ROLE;
public static final Version DEFINED_SINCE = Version.V_6_3_0;
- public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE);
public BeatsSystemUser(boolean enabled) {
super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BuiltinUserInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BuiltinUserInfo.java
deleted file mode 100644
index 0ecd457ba09..00000000000
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BuiltinUserInfo.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.core.security.user;
-
-import org.elasticsearch.Version;
-
-/**
- * BuiltinUserInfo provides common user meta data for newly introduced pre defined System Users.
- */
-public class BuiltinUserInfo {
- private final String name;
- private final String role;
- private final Version definedSince;
-
- public BuiltinUserInfo(String name, String role, Version definedSince) {
- this.name = name;
- this.role = role;
- this.definedSince = definedSince;
- }
-
- /** Get the builtin users name. */
- public String getName() {
- return name;
- }
-
- /** Get the builtin users default role name. */
- public String getRole() {
- return role;
- }
-
- /** Get version the builtin user was introduced with. */
- public Version getDefinedSince() {
- return definedSince;
- }
-}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java
index a5b8d9afead..bc2c9c3157b 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java
@@ -11,14 +11,17 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
+import org.elasticsearch.xpack.core.rollup.RollupField;
import java.io.IOException;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.isIn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -45,8 +48,8 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<MetricConfig> {
+ Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
+
+ // Have to mock fieldcaps because the ctor's aren't public...
+ FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
+ when(fieldCaps.isAggregatable()).thenReturn(true);
+ responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));
+
+ MetricConfig config = new MetricConfig("my_field", Arrays.asList("avg", "max"));
+ config.validateMappings(responseMap, e);
+ assertThat(e.validationErrors().get(0), equalTo("Only the metrics " + RollupField.SUPPORTED_DATE_METRICS.toString() +
+ " are supported for [date] types, but unsupported metrics [avg] supplied for field [my_field]"));
+ }
+
public void testValidateMatchingField() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
@@ -153,6 +171,13 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase<MetricConfig> {
+ logger.trace(() -> new ParameterizedMessage("Removing old tokens: [{}]", Strings.toString(expiredDbq)));
executeAsyncWithOrigin(client, SECURITY_ORIGIN, DeleteByQueryAction.INSTANCE, expiredDbq,
ActionListener.wrap(r -> {
debugDbqResponse(r);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
index 4f1ec4ad8c0..6ac7f687e24 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java
@@ -8,11 +8,8 @@ package org.elasticsearch.xpack.security.authc;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
-import org.elasticsearch.cluster.ClusterStateUpdateTask;
-import org.elasticsearch.cluster.metadata.MetaData;
-import org.elasticsearch.common.Priority;
-import org.elasticsearch.core.internal.io.IOUtils;
import org.apache.lucene.util.UnicodeUtil;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
@@ -36,9 +33,12 @@ import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.ack.AckedRequest;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
+import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.cache.Cache;
@@ -60,15 +60,16 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.XPackSettings;
-import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.security.ScrollHelper;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp;
@@ -114,6 +115,7 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Consumer;
import java.util.function.Supplier;
import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException;
@@ -159,6 +161,7 @@ public final class TokenService extends AbstractComponent {
static final String INVALIDATED_TOKEN_DOC_TYPE = "invalidated-token";
static final int MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1;
private static final int MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * MINIMUM_BYTES) / 3)).intValue();
+ private static final int MAX_RETRY_ATTEMPTS = 5;
private final SecureRandom secureRandom = new SecureRandom();
private final ClusterService clusterService;
@@ -217,9 +220,10 @@ public final class TokenService extends AbstractComponent {
boolean includeRefreshToken) throws IOException {
ensureEnabled();
if (authentication == null) {
- listener.onFailure(new IllegalArgumentException("authentication must be provided"));
+ listener.onFailure(traceLog("create token", null, new IllegalArgumentException("authentication must be provided")));
} else if (originatingClientAuth == null) {
- listener.onFailure(new IllegalArgumentException("originating client authentication must be provided"));
+ listener.onFailure(traceLog("create token", null,
+ new IllegalArgumentException("originating client authentication must be provided")));
} else {
final Instant created = clock.instant();
final Instant expiration = getExpirationTime(created);
@@ -252,16 +256,17 @@ public final class TokenService extends AbstractComponent {
.field("realm", authentication.getAuthenticatedBy().getName())
.endObject();
builder.endObject();
+ final String documentId = getTokenDocumentId(userToken);
IndexRequest request =
- client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken))
+ client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, documentId)
.setOpType(OpType.CREATE)
.setSource(builder)
.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
.request();
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
- executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request,
- ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)),
- listener::onFailure))
+ securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", documentId, ex)),
+ () -> executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request,
+ ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)),
+ listener::onFailure))
);
}
}
@@ -323,7 +328,7 @@ public final class TokenService extends AbstractComponent {
Instant currentTime = clock.instant();
if (currentTime.isAfter(userToken.getExpirationTime())) {
// token expired
- listener.onFailure(expiredTokenException());
+ listener.onFailure(traceLog("decode token", token, expiredTokenException()));
} else {
checkIfTokenIsRevoked(userToken, listener);
}
@@ -361,43 +366,50 @@ public final class TokenService extends AbstractComponent {
final Cipher cipher = getDecryptionCipher(iv, decodeKey, version, decodedSalt);
if (version.onOrAfter(Version.V_6_2_0)) {
// we only have the id and need to get the token from the doc!
- decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId ->
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
- final GetRequest getRequest =
- client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE,
+ decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> {
+ if (securityIndex.isAvailable() == false) {
+ logger.warn("failed to get token [{}] since index is not available", tokenId);
+ listener.onResponse(null);
+ } else {
+ securityIndex.checkIndexVersionThenExecute(
+ ex -> listener.onFailure(traceLog("prepare security index", tokenId, ex)),
+ () -> {
+ final GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE,
getTokenDocumentId(tokenId)).request();
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest,
- ActionListener.wrap(response -> {
- if (response.isExists()) {
- Map accessTokenSource =
+ Consumer onFailure = ex -> listener.onFailure(traceLog("decode token", tokenId, ex));
+ executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest,
+ ActionListener.wrap(response -> {
+ if (response.isExists()) {
+ Map accessTokenSource =
(Map) response.getSource().get("access_token");
- if (accessTokenSource == null) {
- listener.onFailure(new IllegalStateException("token document is missing " +
- "the access_token field"));
- } else if (accessTokenSource.containsKey("user_token") == false) {
- listener.onFailure(new IllegalStateException("token document is missing " +
- "the user_token field"));
- } else {
- Map userTokenSource =
+ if (accessTokenSource == null) {
+ onFailure.accept(new IllegalStateException(
+ "token document is missing the access_token field"));
+ } else if (accessTokenSource.containsKey("user_token") == false) {
+ onFailure.accept(new IllegalStateException(
+ "token document is missing the user_token field"));
+ } else {
+ Map userTokenSource =
(Map) accessTokenSource.get("user_token");
- listener.onResponse(UserToken.fromSourceMap(userTokenSource));
- }
- } else {
- listener.onFailure(
+ listener.onResponse(UserToken.fromSourceMap(userTokenSource));
+ }
+ } else {
+ onFailure.accept(
new IllegalStateException("token document is missing and must be present"));
- }
- }, e -> {
- // if the index or the shard is not there / available we assume that
- // the token is not valid
- if (isShardNotAvailableException(e)) {
- logger.warn("failed to get token [{}] since index is not available", tokenId);
- listener.onResponse(null);
- } else {
- logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e);
- listener.onFailure(e);
- }
- }), client::get);
- }), listener::onFailure));
+ }
+ }, e -> {
+ // if the index or the shard is not there / available we assume that
+ // the token is not valid
+ if (isShardNotAvailableException(e)) {
+ logger.warn("failed to get token [{}] since index is not available", tokenId);
+ listener.onResponse(null);
+ } else {
+ logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e);
+ listener.onFailure(e);
+ }
+ }), client::get);
+ });
+ }}, listener::onFailure));
} else {
decryptToken(in, cipher, version, listener);
}
@@ -461,13 +473,14 @@ public final class TokenService extends AbstractComponent {
public void invalidateAccessToken(String tokenString, ActionListener<Boolean> listener) {
ensureEnabled();
if (Strings.isNullOrEmpty(tokenString)) {
+ logger.trace("No token-string provided");
listener.onFailure(new IllegalArgumentException("token must be provided"));
} else {
maybeStartTokenRemover();
try {
decodeToken(tokenString, ActionListener.wrap(userToken -> {
if (userToken == null) {
- listener.onFailure(malformedTokenException());
+ listener.onFailure(traceLog("invalidate token", tokenString, malformedTokenException()));
} else {
final long expirationEpochMilli = getExpirationTime().toEpochMilli();
indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli);
@@ -488,6 +501,7 @@ public final class TokenService extends AbstractComponent {
public void invalidateAccessToken(UserToken userToken, ActionListener<Boolean> listener) {
ensureEnabled();
if (userToken == null) {
+ logger.trace("No access token provided");
listener.onFailure(new IllegalArgumentException("token must be provided"));
} else {
maybeStartTokenRemover();
@@ -499,6 +513,7 @@ public final class TokenService extends AbstractComponent {
public void invalidateRefreshToken(String refreshToken, ActionListener<Boolean> listener) {
ensureEnabled();
if (Strings.isNullOrEmpty(refreshToken)) {
+ logger.trace("No refresh token provided");
listener.onFailure(new IllegalArgumentException("refresh token must be provided"));
} else {
maybeStartTokenRemover();
@@ -521,7 +536,8 @@ public final class TokenService extends AbstractComponent {
*/
private void indexBwcInvalidation(UserToken userToken, ActionListener<Boolean> listener, AtomicInteger attemptCount,
long expirationEpochMilli) {
- if (attemptCount.get() > 5) {
+ if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
+ logger.warn("Failed to invalidate token [{}] after [{}] attempts", userToken.getId(), attemptCount.get());
listener.onFailure(invalidGrantException("failed to invalidate token"));
} else {
final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken);
@@ -532,14 +548,15 @@ public final class TokenService extends AbstractComponent {
.request();
final String tokenDocId = getTokenDocumentId(userToken);
final Version version = userToken.getVersion();
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest,
+ securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)),
+ () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest,
ActionListener.wrap(indexResponse -> {
ActionListener<Boolean> wrappedListener =
ActionListener.wrap(ignore -> listener.onResponse(true), listener::onFailure);
indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L);
}, e -> {
Throwable cause = ExceptionsHelper.unwrapCause(e);
+ traceLog("(bwc) invalidate token", tokenDocId, cause);
if (cause instanceof VersionConflictEngineException) {
// expected since something else could have invalidated
ActionListener<Boolean> wrappedListener =
@@ -566,7 +583,8 @@ public final class TokenService extends AbstractComponent {
*/
private void indexInvalidation(String tokenDocId, Version version, ActionListener<Boolean> listener, AtomicInteger attemptCount,
String srcPrefix, long documentVersion) {
- if (attemptCount.get() > 5) {
+ if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
+ logger.warn("Failed to invalidate token [{}] after [{}] attempts", tokenDocId, attemptCount.get());
listener.onFailure(invalidGrantException("failed to invalidate token"));
} else {
UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId)
@@ -574,75 +592,79 @@ public final class TokenService extends AbstractComponent {
.setVersion(documentVersion)
.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
.request();
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
- ActionListener.wrap(updateResponse -> {
- if (updateResponse.getGetResult() != null
- && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix)
- && ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
- .containsKey("invalidated")) {
- final boolean prevInvalidated = (boolean)
- ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
- .get("invalidated");
- listener.onResponse(prevInvalidated == false);
- } else {
- listener.onResponse(true);
- }
- }, e -> {
- Throwable cause = ExceptionsHelper.unwrapCause(e);
- if (cause instanceof DocumentMissingException) {
- if (version.onOrAfter(Version.V_6_2_0)) {
- // the document should always be there!
- listener.onFailure(e);
- } else {
- listener.onResponse(false);
- }
- } else if (cause instanceof VersionConflictEngineException
- || isShardNotAvailableException(cause)) {
- attemptCount.incrementAndGet();
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
- client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(),
- ActionListener.wrap(getResult -> {
- if (getResult.isExists()) {
- Map source = getResult.getSource();
- Map accessTokenSource =
- (Map) source.get("access_token");
- if (accessTokenSource == null) {
- listener.onFailure(new IllegalArgumentException("token document is " +
- "missing access_token field"));
- } else {
- Boolean invalidated = (Boolean) accessTokenSource.get("invalidated");
- if (invalidated == null) {
- listener.onFailure(new IllegalStateException(
- "token document missing invalidated value"));
- } else if (invalidated) {
- listener.onResponse(false);
- } else {
- indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
- getResult.getVersion());
- }
- }
- } else if (version.onOrAfter(Version.V_6_2_0)) {
- logger.warn("could not find token document [{}] but there should " +
- "be one as token has version [{}]", tokenDocId, version);
- listener.onFailure(invalidGrantException("could not invalidate the token"));
- } else {
- listener.onResponse(false);
- }
- },
- e1 -> {
- if (isShardNotAvailableException(e1)) {
- // don't increment count; call again
- indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
- documentVersion);
- } else {
- listener.onFailure(e1);
- }
- }), client::get);
- } else {
+ securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)),
+ () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
+ ActionListener.wrap(updateResponse -> {
+ logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, tokenDocId);
+ if (updateResponse.getGetResult() != null
+ && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix)
+ && ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
+ .containsKey("invalidated")) {
+ final boolean prevInvalidated = (boolean)
+ ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix))
+ .get("invalidated");
+ listener.onResponse(prevInvalidated == false);
+ } else {
+ listener.onResponse(true);
+ }
+ }, e -> {
+ Throwable cause = ExceptionsHelper.unwrapCause(e);
+ traceLog("invalidate token", tokenDocId, cause);
+ if (cause instanceof DocumentMissingException) {
+ if (version.onOrAfter(Version.V_6_2_0)) {
+ // the document should always be there!
listener.onFailure(e);
+ } else {
+ listener.onResponse(false);
}
- }), client::update));
+ } else if (cause instanceof VersionConflictEngineException
+ || isShardNotAvailableException(cause)) {
+ attemptCount.incrementAndGet();
+ executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
+ client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(),
+ ActionListener.wrap(getResult -> {
+ if (getResult.isExists()) {
+ Map source = getResult.getSource();
+ Map accessTokenSource = (Map) source.get("access_token");
+ Consumer onFailure = ex -> listener.onFailure(traceLog("get token", tokenDocId, ex));
+ if (accessTokenSource == null) {
+ onFailure.accept(new IllegalArgumentException(
+ "token document is missing access_token field"));
+ } else {
+ Boolean invalidated = (Boolean) accessTokenSource.get("invalidated");
+ if (invalidated == null) {
+ onFailure.accept(new IllegalStateException(
+ "token document missing invalidated value"));
+ } else if (invalidated) {
+ logger.trace("Token [{}] is already invalidated", tokenDocId);
+ listener.onResponse(false);
+ } else {
+ indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
+ getResult.getVersion());
+ }
+ }
+ } else if (version.onOrAfter(Version.V_6_2_0)) {
+ logger.warn("could not find token document [{}] but there should " +
+ "be one as token has version [{}]", tokenDocId, version);
+ listener.onFailure(invalidGrantException("could not invalidate the token"));
+ } else {
+ listener.onResponse(false);
+ }
+ },
+ e1 -> {
+ traceLog("get token", tokenDocId, e1);
+ if (isShardNotAvailableException(e1)) {
+ // don't increment count; call again
+ indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix,
+ documentVersion);
+ } else {
+ listener.onFailure(e1);
+ }
+ }), client::get);
+ } else {
+ listener.onFailure(e);
+ }
+ }), client::update));
}
}
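Several of the TokenService methods in this diff share the same bounded-retry shape: an AtomicInteger attempt counter is threaded through the recursive async call, and the operation gives up once MAX_RETRY_ATTEMPTS is exceeded. Below is a simplified, synchronous sketch of that shape; BoundedRetryExample and the random "transient failure" stand-in are illustrative only, not the TokenService code.

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.function.Consumer;

    // Sketch of the retry pattern used by indexBwcInvalidation/indexInvalidation:
    // the counter travels with the recursive call and bounds the number of retries.
    class BoundedRetryExample {
        private static final int MAX_RETRY_ATTEMPTS = 5;

        static void tryOperation(AtomicInteger attemptCount, Consumer<String> onSuccess, Consumer<Exception> onFailure) {
            if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
                onFailure.accept(new IllegalStateException("failed after [" + attemptCount.get() + "] attempts"));
            } else if (Math.random() < 0.2) {                       // stand-in for a version conflict or missing shard
                attemptCount.incrementAndGet();
                tryOperation(attemptCount, onSuccess, onFailure);   // retry with the incremented counter
            } else {
                onSuccess.accept("done on attempt " + (attemptCount.get() + 1));
            }
        }

        public static void main(String[] args) {
            tryOperation(new AtomicInteger(0), System.out::println, e -> System.err.println(e.getMessage()));
        }
    }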
@@ -663,7 +685,8 @@ public final class TokenService extends AbstractComponent {
private void findTokenFromRefreshToken(String refreshToken, ActionListener<Tuple<SearchResponse, AtomicInteger>> listener,
AtomicInteger attemptCount) {
- if (attemptCount.get() > 5) {
+ if (attemptCount.get() > MAX_RETRY_ATTEMPTS) {
+ logger.warn("Failed to find token for refresh token [{}] after [{}] attempts", refreshToken, attemptCount.get());
listener.onFailure(invalidGrantException("could not refresh the requested token"));
} else {
SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
@@ -673,30 +696,37 @@ public final class TokenService extends AbstractComponent {
.setVersion(true)
.request();
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
+ if (securityIndex.isAvailable() == false) {
+ logger.debug("security index is not available to find token from refresh token, retrying");
+ attemptCount.incrementAndGet();
+ findTokenFromRefreshToken(refreshToken, listener, attemptCount);
+ } else {
+ Consumer onFailure = ex -> listener.onFailure(traceLog("find by refresh token", refreshToken, ex));
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
- ActionListener.wrap(searchResponse -> {
- if (searchResponse.isTimedOut()) {
- attemptCount.incrementAndGet();
- findTokenFromRefreshToken(refreshToken, listener, attemptCount);
- } else if (searchResponse.getHits().getHits().length < 1) {
- logger.info("could not find token document with refresh_token [{}]", refreshToken);
- listener.onFailure(invalidGrantException("could not refresh the requested token"));
- } else if (searchResponse.getHits().getHits().length > 1) {
- listener.onFailure(new IllegalStateException("multiple tokens share the same refresh token"));
- } else {
- listener.onResponse(new Tuple<>(searchResponse, attemptCount));
- }
- }, e -> {
- if (isShardNotAvailableException(e)) {
- logger.debug("failed to search for token document, retrying", e);
- attemptCount.incrementAndGet();
- findTokenFromRefreshToken(refreshToken, listener, attemptCount);
- } else {
- listener.onFailure(e);
- }
- }),
- client::search));
+ ActionListener.wrap(searchResponse -> {
+ if (searchResponse.isTimedOut()) {
+ attemptCount.incrementAndGet();
+ findTokenFromRefreshToken(refreshToken, listener, attemptCount);
+ } else if (searchResponse.getHits().getHits().length < 1) {
+ logger.info("could not find token document with refresh_token [{}]", refreshToken);
+ onFailure.accept(invalidGrantException("could not refresh the requested token"));
+ } else if (searchResponse.getHits().getHits().length > 1) {
+ onFailure.accept(new IllegalStateException("multiple tokens share the same refresh token"));
+ } else {
+ listener.onResponse(new Tuple<>(searchResponse, attemptCount));
+ }
+ }, e -> {
+ if (isShardNotAvailableException(e)) {
+ logger.debug("failed to search for token document, retrying", e);
+ attemptCount.incrementAndGet();
+ findTokenFromRefreshToken(refreshToken, listener, attemptCount);
+ } else {
+ onFailure.accept(e);
+ }
+ }),
+ client::search));
+ }
}
}
@@ -707,62 +737,64 @@ public final class TokenService extends AbstractComponent {
*/
private void innerRefresh(String tokenDocId, Authentication userAuth, ActionListener<Tuple<UserToken, String>> listener,
AtomicInteger attemptCount) {
- if (attemptCount.getAndIncrement() > 5) {
+ if (attemptCount.getAndIncrement() > MAX_RETRY_ATTEMPTS) {
+ logger.warn("Failed to refresh token for doc [{}] after [{}] attempts", tokenDocId, attemptCount.get());
listener.onFailure(invalidGrantException("could not refresh the requested token"));
} else {
+ Consumer onFailure = ex -> listener.onFailure(traceLog("refresh token", tokenDocId, ex));
GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request();
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest,
- ActionListener.wrap(response -> {
- if (response.isExists()) {
- final Map source = response.getSource();
- final Optional invalidSource = checkTokenDocForRefresh(source, userAuth);
+ ActionListener.wrap(response -> {
+ if (response.isExists()) {
+ final Map source = response.getSource();
+ final Optional invalidSource = checkTokenDocForRefresh(source, userAuth);
- if (invalidSource.isPresent()) {
- listener.onFailure(invalidSource.get());
- } else {
- final Map userTokenSource = (Map)
- ((Map) source.get("access_token")).get("user_token");
- final String authString = (String) userTokenSource.get("authentication");
- final Integer version = (Integer) userTokenSource.get("version");
- final Map metadata = (Map) userTokenSource.get("metadata");
+ if (invalidSource.isPresent()) {
+ onFailure.accept(invalidSource.get());
+ } else {
+ final Map userTokenSource = (Map)
+ ((Map) source.get("access_token")).get("user_token");
+ final String authString = (String) userTokenSource.get("authentication");
+ final Integer version = (Integer) userTokenSource.get("version");
+ final Map metadata = (Map) userTokenSource.get("metadata");
- Version authVersion = Version.fromId(version);
- try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(authString))) {
- in.setVersion(authVersion);
- Authentication authentication = new Authentication(in);
- UpdateRequest updateRequest =
- client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId)
- .setVersion(response.getVersion())
- .setDoc("refresh_token", Collections.singletonMap("refreshed", true))
- .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
- .request();
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest,
- ActionListener.wrap(
- updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true),
- e -> {
- Throwable cause = ExceptionsHelper.unwrapCause(e);
- if (cause instanceof VersionConflictEngineException ||
- isShardNotAvailableException(e)) {
- innerRefresh(tokenDocId, userAuth,
- listener, attemptCount);
- } else {
- listener.onFailure(e);
- }
- }),
- client::update);
- }
+ Version authVersion = Version.fromId(version);
+ try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(authString))) {
+ in.setVersion(authVersion);
+ Authentication authentication = new Authentication(in);
+ UpdateRequest updateRequest =
+ client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId)
+ .setVersion(response.getVersion())
+ .setDoc("refresh_token", Collections.singletonMap("refreshed", true))
+ .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
+ .request();
+ executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest,
+ ActionListener.wrap(
+ updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true),
+ e -> {
+ Throwable cause = ExceptionsHelper.unwrapCause(e);
+ if (cause instanceof VersionConflictEngineException ||
+ isShardNotAvailableException(e)) {
+ innerRefresh(tokenDocId, userAuth,
+ listener, attemptCount);
+ } else {
+ onFailure.accept(e);
+ }
+ }),
+ client::update);
}
- } else {
- logger.info("could not find token document [{}] for refresh", tokenDocId);
- listener.onFailure(invalidGrantException("could not refresh the requested token"));
}
- }, e -> {
- if (isShardNotAvailableException(e)) {
- innerRefresh(tokenDocId, userAuth, listener, attemptCount);
- } else {
- listener.onFailure(e);
- }
- }), client::get);
+ } else {
+ logger.info("could not find token document [{}] for refresh", tokenDocId);
+ onFailure.accept(invalidGrantException("could not refresh the requested token"));
+ }
+ }, e -> {
+ if (isShardNotAvailableException(e)) {
+ innerRefresh(tokenDocId, userAuth, listener, attemptCount);
+ } else {
+ listener.onFailure(e);
+ }
+ }), client::get);
}
}
@@ -831,22 +863,22 @@ public final class TokenService extends AbstractComponent {
if (Strings.isNullOrEmpty(realmName)) {
listener.onFailure(new IllegalArgumentException("Realm name is required"));
- return;
- }
-
- final Instant now = clock.instant();
- final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
+ } else if (securityIndex.isAvailable() == false) {
+ listener.onResponse(Collections.emptyList());
+ } else {
+ final Instant now = clock.instant();
+ final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery("doc_type", "token"))
.filter(QueryBuilders.termQuery("access_token.realm", realmName))
.filter(QueryBuilders.boolQuery()
- .should(QueryBuilders.boolQuery()
- .must(QueryBuilders.termQuery("access_token.invalidated", false))
- .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli()))
- )
- .should(QueryBuilders.termQuery("refresh_token.invalidated", false))
+ .should(QueryBuilders.boolQuery()
+ .must(QueryBuilders.termQuery("access_token.invalidated", false))
+ .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli()))
+ )
+ .should(QueryBuilders.termQuery("refresh_token.invalidated", false))
);
- final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
+ final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME)
.setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
.setQuery(boolQuery)
.setVersion(false)
@@ -854,9 +886,10 @@ public final class TokenService extends AbstractComponent {
.setFetchSource(true)
.request();
- final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
- ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit));
+ final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
+ ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit));
+ }
}
private Tuple<UserToken, String> parseHit(SearchHit hit) {
@@ -923,14 +956,17 @@ public final class TokenService extends AbstractComponent {
*/
private void checkIfTokenIsRevoked(UserToken userToken, ActionListener<UserToken> listener) {
if (securityIndex.indexExists() == false) {
- // index doesn't exist so the token is considered valid.
+ // index doesn't exist so the token is considered valid. it is important to note that
+ // we do not use isAvailable as the lack of a shard being available is not equivalent
+ // to the index not existing in the case of revocation checking.
listener.onResponse(userToken);
} else {
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
MultiGetRequest mGetRequest = client.prepareMultiGet()
.add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken))
.add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken))
.request();
+ Consumer onFailure = ex -> listener.onFailure(traceLog("check token state", userToken.getId(), ex));
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
mGetRequest,
new ActionListener<MultiGetResponse>() {
@@ -941,26 +977,26 @@ public final class TokenService extends AbstractComponent {
if (itemResponse[0].isFailed()) {
onFailure(itemResponse[0].getFailure().getFailure());
} else if (itemResponse[0].getResponse().isExists()) {
- listener.onFailure(expiredTokenException());
+ onFailure.accept(expiredTokenException());
} else if (itemResponse[1].isFailed()) {
onFailure(itemResponse[1].getFailure().getFailure());
} else if (itemResponse[1].getResponse().isExists()) {
Map source = itemResponse[1].getResponse().getSource();
Map accessTokenSource = (Map) source.get("access_token");
if (accessTokenSource == null) {
- listener.onFailure(new IllegalStateException("token document is missing access_token field"));
+ onFailure.accept(new IllegalStateException("token document is missing access_token field"));
} else {
Boolean invalidated = (Boolean) accessTokenSource.get("invalidated");
if (invalidated == null) {
- listener.onFailure(new IllegalStateException("token document is missing invalidated field"));
+ onFailure.accept(new IllegalStateException("token document is missing invalidated field"));
} else if (invalidated) {
- listener.onFailure(expiredTokenException());
+ onFailure.accept(expiredTokenException());
} else {
listener.onResponse(userToken);
}
}
} else if (userToken.getVersion().onOrAfter(Version.V_6_2_0)) {
- listener.onFailure(new IllegalStateException("token document is missing and must be present"));
+ onFailure.accept(new IllegalStateException("token document is missing and must be present"));
} else {
listener.onResponse(userToken);
}
@@ -1122,11 +1158,31 @@ public final class TokenService extends AbstractComponent {
*/
private static ElasticsearchSecurityException invalidGrantException(String detail) {
ElasticsearchSecurityException e =
- new ElasticsearchSecurityException("invalid_grant", RestStatus.BAD_REQUEST);
+ new ElasticsearchSecurityException("invalid_grant", RestStatus.BAD_REQUEST);
e.addHeader("error_description", detail);
return e;
}
+ /**
+ * Logs an exception at TRACE level (if enabled)
+ */
+ private <E extends Throwable> E traceLog(String action, String identifier, E exception) {
+ if (logger.isTraceEnabled()) {
+ if (exception instanceof ElasticsearchException) {
+ final ElasticsearchException esEx = (ElasticsearchException) exception;
+ final Object detail = esEx.getHeader("error_description");
+ if (detail != null) {
+ logger.trace("Failure in [{}] for id [{}] - [{}] [{}]", action, identifier, detail, esEx.getDetailedMessage());
+ } else {
+ logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, esEx.getDetailedMessage());
+ }
+ } else {
+ logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, exception.toString());
+ }
+ }
+ return exception;
+ }
+
boolean isExpiredTokenException(ElasticsearchSecurityException e) {
final List<String> headers = e.getHeader("WWW-Authenticate");
return headers != null && headers.stream().anyMatch(EXPIRED_TOKEN_WWW_AUTH_VALUE::equals);
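The traceLog helper added near the end of TokenService logs a failure (only when TRACE is enabled) and returns the same exception, so call sites can write listener.onFailure(traceLog(...)) without restructuring their error paths. A stripped-down sketch of that idiom follows; TraceLogExample is illustrative, and System.err stands in for the real logger.

    import java.util.function.Consumer;

    // Sketch of the trace-and-return idiom: log the failure, then hand the
    // exception back unchanged so it can flow straight into a listener.
    class TraceLogExample {
        static boolean traceEnabled = true;

        static <E extends Throwable> E traceLog(String action, String identifier, E exception) {
            if (traceEnabled) {
                System.err.println("Failure in [" + action + "] for id [" + identifier + "] - [" + exception + "]");
            }
            return exception; // unchanged, so callers can pass it directly to onFailure
        }

        public static void main(String[] args) {
            Consumer<Exception> onFailure = e -> System.out.println("listener got: " + e.getMessage());
            onFailure.accept(traceLog("invalidate token", "doc-1", new IllegalArgumentException("token must be provided")));
        }
    }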
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java
index 620c3817ebb..196a48416a4 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java
@@ -118,8 +118,7 @@ public class NativeUsersStore extends AbstractComponent {
}
};
- if (securityIndex.indexExists() == false) {
- // TODO remove this short circuiting and fix tests that fail without this!
+ if (securityIndex.isAvailable() == false) {
listener.onResponse(Collections.emptyList());
} else if (userNames.length == 1) { // optimization for single user lookup
final String username = userNames[0];
@@ -127,7 +126,7 @@ public class NativeUsersStore extends AbstractComponent {
(uap) -> listener.onResponse(uap == null ? Collections.emptyList() : Collections.singletonList(uap.user())),
handleException));
} else {
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
final QueryBuilder query;
if (userNames == null || userNames.length == 0) {
query = QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE);
@@ -155,10 +154,10 @@ public class NativeUsersStore extends AbstractComponent {
}
void getUserCount(final ActionListener<Long> listener) {
- if (securityIndex.indexExists() == false) {
+ if (securityIndex.isAvailable() == false) {
listener.onResponse(0L);
} else {
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
client.prepareSearch(SECURITY_INDEX_NAME)
.setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE))
@@ -182,11 +181,10 @@ public class NativeUsersStore extends AbstractComponent {
* Async method to retrieve a user and their password
*/
private void getUserAndPassword(final String user, final ActionListener<UserAndPassword> listener) {
- if (securityIndex.indexExists() == false) {
- // TODO remove this short circuiting and fix tests that fail without this!
+ if (securityIndex.isAvailable() == false) {
listener.onResponse(null);
} else {
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
client.prepareGet(SECURITY_INDEX_NAME,
INDEX_TYPE, getIdForUser(USER_DOC_TYPE, user)).request(),
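The NativeUsersStore hunks converge on one guard: when the security index is not available the store answers with a neutral value (empty list, null, zero, false) instead of failing, and reads go through checkIndexVersionThenExecute rather than prepareIndexIfNeededThenExecute so they no longer trigger index creation. A simplified sketch of that guard follows; the SecurityIndex interface and getUsers method are stand-ins, not the real SecurityIndexManager API.

    import java.util.Collections;
    import java.util.List;
    import java.util.function.Consumer;

    // Sketch of the availability guard: unavailable index => neutral answer,
    // otherwise verify the index version and only then run the read.
    class AvailabilityGuardExample {
        interface SecurityIndex {
            boolean isAvailable();
            void checkIndexVersionThenExecute(Consumer<Exception> onFailure, Runnable andThen);
        }

        static void getUsers(SecurityIndex securityIndex, Consumer<List<String>> onResponse, Consumer<Exception> onFailure) {
            if (securityIndex.isAvailable() == false) {
                onResponse.accept(Collections.emptyList());   // neutral answer while the index is unavailable
            } else {
                securityIndex.checkIndexVersionThenExecute(onFailure,
                    () -> onResponse.accept(Collections.singletonList("kimchy"))); // stand-in for the real lookup
            }
        }

        public static void main(String[] args) {
            SecurityIndex index = new SecurityIndex() {
                public boolean isAvailable() { return true; }
                public void checkIndexVersionThenExecute(Consumer<Exception> onFailure, Runnable andThen) { andThen.run(); }
            };
            getUsers(index, users -> System.out.println(users), e -> System.err.println(e));
        }
    }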
@@ -459,16 +457,19 @@ public class NativeUsersStore extends AbstractComponent {
}
public void deleteUser(final DeleteUserRequest deleteUserRequest, final ActionListener<Boolean> listener) {
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
- DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME,
+ if (securityIndex.isAvailable() == false) {
+ listener.onResponse(false);
+ } else {
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
+ DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME,
INDEX_TYPE, getIdForUser(USER_DOC_TYPE, deleteUserRequest.username())).request();
- request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy());
- executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
+ request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy());
+ executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request,
new ActionListener<DeleteResponse>() {
@Override
public void onResponse(DeleteResponse deleteResponse) {
clearRealmCache(deleteUserRequest.username(), listener,
- deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
+ deleteResponse.getResult() == DocWriteResponse.Result.DELETED);
}
@Override
@@ -476,7 +477,8 @@ public class NativeUsersStore extends AbstractComponent {
listener.onFailure(e);
}
}, client::delete);
- });
+ });
+ }
}
/**
@@ -498,11 +500,10 @@ public class NativeUsersStore extends AbstractComponent {
}
void getReservedUserInfo(String username, ActionListener<ReservedUserInfo> listener) {
- if (securityIndex.indexExists() == false) {
- // TODO remove this short circuiting and fix tests that fail without this!
+ if (securityIndex.isAvailable() == false) {
listener.onResponse(null);
} else {
- securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () ->
+ securityIndex.checkIndexVersionThenExecute(listener::onFailure, () ->
executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
client.prepareGet(SECURITY_INDEX_NAME, INDEX_TYPE,
getIdForUser(RESERVED_USER_TYPE, username)).request(),
@@ -541,49 +542,53 @@ public class NativeUsersStore extends AbstractComponent {
}
void getAllReservedUserInfo(ActionListener