diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties index 9202be97bd9..c902a692071 100644 --- a/.ci/java-versions.properties +++ b/.ci/java-versions.properties @@ -6,3 +6,5 @@ ES_BUILD_JAVA=java11 ES_RUNTIME_JAVA=java8 +GRADLE_TASK=build + diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 2ff4b3e1d34..bfe5d9504c0 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -7,8 +7,8 @@ - - + + @@ -42,7 +42,7 @@ - + @@ -154,7 +154,6 @@ - @@ -182,7 +181,6 @@ - @@ -237,7 +235,6 @@ - @@ -259,7 +256,6 @@ - @@ -267,7 +263,6 @@ - @@ -283,13 +278,10 @@ - - - @@ -307,18 +299,15 @@ - - - @@ -326,7 +315,6 @@ - @@ -360,15 +348,11 @@ - - - - @@ -380,12 +364,10 @@ - - @@ -435,13 +417,10 @@ - - - @@ -550,18 +529,15 @@ - - - @@ -572,11 +548,9 @@ - - @@ -597,7 +571,6 @@ - @@ -637,9 +610,6 @@ - - - @@ -650,12 +620,10 @@ - - @@ -667,7 +635,6 @@ - @@ -684,7 +651,6 @@ - @@ -716,10 +682,6 @@ - - - - diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index bfe7c3d956c..1a41a493586 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -85,6 +85,7 @@ integTestRunner { } integTestCluster { + systemProperty 'es.scripting.update.ctx_in_params', 'false' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' // Truststore settings are not used since TLS is not enabled. Included for testing the get certificates API diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 79d2ccacf61..4e3f778cd15 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -280,7 +280,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { .startObject() .startObject("script") .field("lang", "painless") - .field("code", "ctx._source.field += params.count") + .field("source", "ctx._source.field += params.count") .endObject() .endObject())); Response response = client().performRequest(request); @@ -991,10 +991,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::update-by-query-request-conflicts request.setConflicts("proceed"); // <1> // end::update-by-query-request-conflicts - // tag::update-by-query-request-typeOrQuery - request.setDocTypes("doc"); // <1> - request.setQuery(new TermQueryBuilder("user", "kimchy")); // <2> - // end::update-by-query-request-typeOrQuery + // tag::update-by-query-request-query + request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1> + // end::update-by-query-request-query // tag::update-by-query-request-size request.setSize(10); // <1> // end::update-by-query-request-size @@ -1110,10 +1109,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::delete-by-query-request-conflicts request.setConflicts("proceed"); // <1> // end::delete-by-query-request-conflicts - // tag::delete-by-query-request-typeOrQuery - request.setDocTypes("doc"); // <1> - request.setQuery(new TermQueryBuilder("user", "kimchy")); // <2> - // end::delete-by-query-request-typeOrQuery + // tag::delete-by-query-request-query + 
request.setQuery(new TermQueryBuilder("user", "kimchy")); // <1> + // end::delete-by-query-request-query + // tag::delete-by-query-request-size request.setSize(10); // <1> // end::delete-by-query-request-size diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 4382924bb97..1e596158750 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -140,10 +140,9 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { // end::search-request-basic } { - // tag::search-request-indices-types + // tag::search-request-indices SearchRequest searchRequest = new SearchRequest("posts"); // <1> - searchRequest.types("doc"); // <2> - // end::search-request-indices-types + // end::search-request-indices // tag::search-request-routing searchRequest.routing("routing"); // <1> // end::search-request-routing diff --git a/docs/java-rest/high-level/document/delete-by-query.asciidoc b/docs/java-rest/high-level/document/delete-by-query.asciidoc index 5ec246a9121..3a4c8a15dea 100644 --- a/docs/java-rest/high-level/document/delete-by-query.asciidoc +++ b/docs/java-rest/high-level/document/delete-by-query.asciidoc @@ -24,14 +24,13 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-con -------------------------------------------------- <1> Set `proceed` on version conflict -You can limit the documents by adding a type to the source or by adding a query. +You can limit the documents by adding a query. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-typeOrQuery] +include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-by-query-request-query] -------------------------------------------------- -<1> Only copy `doc` type -<2> Only copy documents which have field `user` set to `kimchy` +<1> Only delete documents which have field `user` set to `kimchy` It’s also possible to limit the number of processed documents by setting size. diff --git a/docs/java-rest/high-level/document/update-by-query.asciidoc b/docs/java-rest/high-level/document/update-by-query.asciidoc index 324385a442b..5c7e4f5d3b0 100644 --- a/docs/java-rest/high-level/document/update-by-query.asciidoc +++ b/docs/java-rest/high-level/document/update-by-query.asciidoc @@ -25,14 +25,13 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-con -------------------------------------------------- <1> Set `proceed` on version conflict -You can limit the documents by adding a type to the source or by adding a query. +You can limit the documents by adding a query. ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-typeOrQuery] +include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-by-query-request-query] -------------------------------------------------- -<1> Only copy `doc` type -<2> Only copy documents which have field `user` set to `kimchy` +<1> Only update documents which have field `user` set to `kimchy` It’s also possible to limit the number of processed documents by setting size.
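For orientation, here is a minimal sketch of how the options documented above fit together through the high-level REST client once the type parameter is gone. It assumes a `RestHighLevelClient` named `client` and an illustrative index named `posts`; neither appears in this change, and the synchronous `updateByQuery` helper is used for brevity.

["source","java"]
--------------------------------------------------
UpdateByQueryRequest request = new UpdateByQueryRequest("posts"); // hypothetical index name
request.setConflicts("proceed");                                  // do not abort on version conflicts
request.setQuery(new TermQueryBuilder("user", "kimchy"));         // only process matching documents
request.setSize(10);                                              // cap the number of processed documents
BulkByScrollResponse response =
    client.updateByQuery(request, RequestOptions.DEFAULT);        // execute and wait for the response
--------------------------------------------------

A `DeleteByQueryRequest` takes the same `setConflicts`, `setQuery`, and `setSize` calls and is executed with `client.deleteByQuery(request, RequestOptions.DEFAULT)`.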
diff --git a/docs/java-rest/high-level/search/multi-search.asciidoc b/docs/java-rest/high-level/search/multi-search.asciidoc index 5d5910be300..279b1c7fa39 100644 --- a/docs/java-rest/high-level/search/multi-search.asciidoc +++ b/docs/java-rest/high-level/search/multi-search.asciidoc @@ -28,10 +28,9 @@ For example: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices-types] +include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-indices] -------------------------------------------------- <1> Restricts the request to an index -<2> Limits the request to a type [[java-rest-high-multi-search-sync]] ==== Synchronous Execution diff --git a/docs/java-rest/high-level/search/search.asciidoc b/docs/java-rest/high-level/search/search.asciidoc index e2bcfda79e6..81e680b9250 100644 --- a/docs/java-rest/high-level/search/search.asciidoc +++ b/docs/java-rest/high-level/search/search.asciidoc @@ -33,10 +33,9 @@ Let's first look at some of the optional arguments of a +{request}+: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests-file}[{api}-request-indices-types] +include-tagged::{doc-tests-file}[{api}-request-indices] -------------------------------------------------- <1> Restricts the request to an index -<2> Limits the request to a type There are a couple of other interesting optional parameters: diff --git a/docs/reference/upgrade/cluster_restart.asciidoc b/docs/reference/upgrade/cluster_restart.asciidoc index 06d5e96f8ef..85b6fffdb2e 100644 --- a/docs/reference/upgrade/cluster_restart.asciidoc +++ b/docs/reference/upgrade/cluster_restart.asciidoc @@ -38,6 +38,10 @@ include::shut-down-node.asciidoc[] . *Upgrade all nodes.* + -- +include::remove-xpack.asciidoc[] +-- ++ +-- include::upgrade-node.asciidoc[] include::set-paths-tip.asciidoc[] -- @@ -47,8 +51,6 @@ include::set-paths-tip.asciidoc[] Use the `elasticsearch-plugin` script to install the upgraded version of each installed Elasticsearch plugin. All plugins must be upgraded when you upgrade a node. -+ -include::remove-xpack.asciidoc[] . *Start each upgraded node.* + diff --git a/docs/reference/upgrade/remove-xpack.asciidoc b/docs/reference/upgrade/remove-xpack.asciidoc index 9d4c4c9f779..eb13cec074b 100644 --- a/docs/reference/upgrade/remove-xpack.asciidoc +++ b/docs/reference/upgrade/remove-xpack.asciidoc @@ -1,4 +1,8 @@ -IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3, -remove {xpack} before restarting: `bin/elasticsearch-plugin remove x-pack`. As -of 6.3, {xpack} is included in the default distribution. The node will fail to -start if the old {xpack} plugin is present. +IMPORTANT: If you use {xpack} and are upgrading from a version prior to 6.3, +you must remove the {xpack} plugin before upgrading: +`bin/elasticsearch-plugin remove x-pack`. As of 6.3, {xpack} is included in +the default distribution, so make sure you upgrade to that distribution. If +you upgrade without first removing the {xpack} plugin, the node will fail to +start. If the node fails to start because the old plugin is present, downgrade +to your previous version, remove {xpack}, and then upgrade again. Downgrading +is generally not supported, but it is in this particular case.
diff --git a/docs/reference/upgrade/rolling_upgrade.asciidoc b/docs/reference/upgrade/rolling_upgrade.asciidoc index e2edb6b2922..fc136f25499 100644 --- a/docs/reference/upgrade/rolling_upgrade.asciidoc +++ b/docs/reference/upgrade/rolling_upgrade.asciidoc @@ -44,6 +44,10 @@ include::shut-down-node.asciidoc[] . *Upgrade the node you shut down.* + -- +include::remove-xpack.asciidoc[] +-- ++ +-- include::upgrade-node.asciidoc[] include::set-paths-tip.asciidoc[] -- @@ -53,8 +57,6 @@ include::set-paths-tip.asciidoc[] Use the `elasticsearch-plugin` script to install the upgraded version of each installed Elasticsearch plugin. All plugins must be upgraded when you upgrade a node. -+ -include::remove-xpack.asciidoc[] . *Start the upgraded node.* + diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionNumberSortScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionNumberSortScript.java new file mode 100644 index 00000000000..4a7bbc6182e --- /dev/null +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionNumberSortScript.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.expression; + +import java.io.IOException; +import org.apache.lucene.expressions.Bindings; +import org.apache.lucene.expressions.Expression; +import org.apache.lucene.expressions.SimpleBindings; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DoubleValues; +import org.apache.lucene.search.DoubleValuesSource; +import org.elasticsearch.script.GeneralScriptException; +import org.elasticsearch.script.NumberSortScript; + +/** + * A bridge to evaluate an {@link Expression} against {@link Bindings} in the context + * of a {@link NumberSortScript}. + */ +class ExpressionNumberSortScript implements NumberSortScript.LeafFactory { + + final Expression exprScript; + final SimpleBindings bindings; + final DoubleValuesSource source; + final boolean needsScores; + + ExpressionNumberSortScript(Expression e, SimpleBindings b, boolean needsScores) { + exprScript = e; + bindings = b; + source = exprScript.getDoubleValuesSource(bindings); + this.needsScores = needsScores; + } + + @Override + public NumberSortScript newInstance(final LeafReaderContext leaf) throws IOException { + return new NumberSortScript() { + // Fake the scorer until setScorer is called. 
+ DoubleValues values = source.getValues(leaf, new DoubleValues() { + @Override + public double doubleValue() { + return 0.0D; + } + + @Override + public boolean advanceExact(int doc) { + return true; + } + }); + + @Override + public double execute() { + try { + return values.doubleValue(); + } catch (Exception exception) { + throw new GeneralScriptException("Error evaluating " + exprScript, exception); + } + } + + @Override + public void setDocument(int d) { + try { + values.advanceExact(d); + } catch (IOException e) { + throw new IllegalStateException("Can't advance to doc using " + exprScript, e); + } + } + }; + } + + @Override + public boolean needs_score() { + return needsScores; + } + +} diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 14d701fad8b..5d47c87a445 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -42,6 +42,7 @@ import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.FilterScript; +import org.elasticsearch.script.NumberSortScript; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; @@ -135,6 +136,9 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE } else if (context.instanceClazz.equals(AggregationScript.class)) { AggregationScript.Factory factory = (p, lookup) -> newAggregationScript(expr, lookup, p); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(NumberSortScript.class)) { + NumberSortScript.Factory factory = (p, lookup) -> newSortScript(expr, lookup, p); + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } @@ -187,7 +191,6 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE // noop: _value is special for aggregations, and is handled in ExpressionScriptBindings // TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a // way to know this is for aggregations and so _value is ok to have... 
- } else if (vars != null && vars.containsKey(variable)) { bindFromParams(vars, bindings, variable); } else { @@ -205,6 +208,33 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE return new ExpressionSearchScript(expr, bindings, specialValue, needsScores); } + private NumberSortScript.LeafFactory newSortScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { + // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, + // instead of complicating SimpleBindings (which should stay simple) + SimpleBindings bindings = new SimpleBindings(); + boolean needsScores = false; + for (String variable : expr.variables) { + try { + if (variable.equals("_score")) { + bindings.add(new SortField("_score", SortField.Type.SCORE)); + needsScores = true; + } else if (vars != null && vars.containsKey(variable)) { + bindFromParams(vars, bindings, variable); + } else { + // delegate valuesource creation based on field's type + // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. + final ValueSource valueSource = getDocValueSource(variable, lookup); + needsScores |= valueSource.getSortField(false).needsScores(); + bindings.add(variable, valueSource.asDoubleValuesSource()); + } + } catch (Exception e) { + // we defer "binding" of variables until here: give context for that variable + throw convertToScriptException("link error", expr.sourceText, variable, e); + } + } + return new ExpressionNumberSortScript(expr, bindings, needsScores); + } + private TermsSetQueryScript.LeafFactory newTermsSetQueryScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java new file mode 100644 index 00000000000..301fd2d4db7 --- /dev/null +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/ExpressionNumberSortScriptTests.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script.expression; + +import java.io.IOException; +import java.text.ParseException; +import java.util.Collections; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.script.NumberSortScript; +import org.elasticsearch.script.ScriptException; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESTestCase; + +import static org.mockito.Matchers.anyInt; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExpressionNumberSortScriptTests extends ESTestCase { + private ExpressionScriptEngine service; + private SearchLookup lookup; + + @Override + public void setUp() throws Exception { + super.setUp(); + + NumberFieldType fieldType = new NumberFieldType(NumberType.DOUBLE); + MapperService mapperService = mock(MapperService.class); + when(mapperService.fullName("field")).thenReturn(fieldType); + when(mapperService.fullName("alias")).thenReturn(fieldType); + + SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); + when(doubleValues.advanceExact(anyInt())).thenReturn(true); + when(doubleValues.nextValue()).thenReturn(2.718); + + AtomicNumericFieldData atomicFieldData = mock(AtomicNumericFieldData.class); + when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues); + + IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class); + when(fieldData.getFieldName()).thenReturn("field"); + when(fieldData.load(anyObject())).thenReturn(atomicFieldData); + + service = new ExpressionScriptEngine(Settings.EMPTY); + lookup = new SearchLookup(mapperService, ignored -> fieldData, null); + } + + private NumberSortScript.LeafFactory compile(String expression) { + NumberSortScript.Factory factory = + service.compile(null, expression, NumberSortScript.CONTEXT, Collections.emptyMap()); + return factory.newFactory(Collections.emptyMap(), lookup); + } + + public void testCompileError() { + ScriptException e = expectThrows(ScriptException.class, () -> { + compile("doc['field'].value * *@#)(@$*@#$ + 4"); + }); + assertTrue(e.getCause() instanceof ParseException); + } + + public void testLinkError() { + ScriptException e = expectThrows(ScriptException.class, () -> { + compile("doc['nonexistent'].value * 5"); + }); + assertTrue(e.getCause() instanceof ParseException); + } + + public void testFieldAccess() throws IOException { + NumberSortScript script = compile("doc['field'].value").newInstance(null); + script.setDocument(1); + + double result = script.execute(); + assertEquals(2.718, result, 0.0); + } + + public void testFieldAccessWithFieldAlias() throws IOException { + NumberSortScript script = compile("doc['alias'].value").newInstance(null); + script.setDocument(1); + + double result = script.execute(); + assertEquals(2.718, result, 0.0); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java index 86f2af32d16..eeb636d6697 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/NeedsScoreTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.script.NumberSortScript; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -43,25 +43,25 @@ public class NeedsScoreTests extends ESSingleNodeTestCase { IndexService index = createIndex("test", Settings.EMPTY, "type", "d", "type=double"); Map, List> contexts = new HashMap<>(); - contexts.put(SearchScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(NumberSortScript.CONTEXT, Whitelist.BASE_WHITELISTS); PainlessScriptEngine service = new PainlessScriptEngine(Settings.EMPTY, contexts); QueryShardContext shardContext = index.newQueryShardContext(0, null, () -> 0, null); SearchLookup lookup = new SearchLookup(index.mapperService(), shardContext::getForField, null); - SearchScript.Factory factory = service.compile(null, "1.2", SearchScript.CONTEXT, Collections.emptyMap()); - SearchScript.LeafFactory ss = factory.newFactory(Collections.emptyMap(), lookup); + NumberSortScript.Factory factory = service.compile(null, "1.2", NumberSortScript.CONTEXT, Collections.emptyMap()); + NumberSortScript.LeafFactory ss = factory.newFactory(Collections.emptyMap(), lookup); assertFalse(ss.needs_score()); - factory = service.compile(null, "doc['d'].value", SearchScript.CONTEXT, Collections.emptyMap()); + factory = service.compile(null, "doc['d'].value", NumberSortScript.CONTEXT, Collections.emptyMap()); ss = factory.newFactory(Collections.emptyMap(), lookup); assertFalse(ss.needs_score()); - factory = service.compile(null, "1/_score", SearchScript.CONTEXT, Collections.emptyMap()); + factory = service.compile(null, "1/_score", NumberSortScript.CONTEXT, Collections.emptyMap()); ss = factory.newFactory(Collections.emptyMap(), lookup); assertTrue(ss.needs_score()); - factory = service.compile(null, "doc['d'].value * _score", SearchScript.CONTEXT, Collections.emptyMap()); + factory = service.compile(null, "doc['d'].value * _score", NumberSortScript.CONTEXT, Collections.emptyMap()); ss = factory.newFactory(Collections.emptyMap(), lookup); assertTrue(ss.needs_score()); } diff --git a/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java new file mode 100644 index 00000000000..1d8de9f95f4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/AbstractSortScript.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.script; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.lucene.ScorerAware; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; + +abstract class AbstractSortScript implements ScorerAware { + + private static final Map DEPRECATIONS; + + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "doc", + "Accessing variable [doc] via [params.doc] from within a sort-script " + + "is deprecated in favor of directly accessing [doc]." + ); + deprecations.put( + "_doc", + "Accessing variable [doc] via [params._doc] from within a sort-script " + + "is deprecated in favor of directly accessing [doc]." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + + /** + * The generic runtime parameters for the script. + */ + private final Map params; + + /** A scorer that will return the score for the current document when the script is run. */ + private Scorable scorer; + + /** + * A leaf lookup for the bound segment this script will operate on. + */ + private final LeafSearchLookup leafLookup; + + AbstractSortScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { + this.leafLookup = lookup.getLeafSearchLookup(leafContext); + Map parameters = new HashMap<>(params); + parameters.putAll(leafLookup.asMap()); + this.params = new ParameterMap(parameters, DEPRECATIONS); + } + + protected AbstractSortScript() { + this.params = null; + this.leafLookup = null; + } + + /** + * Return the parameters for this script. + */ + public Map getParams() { + return params; + } + + @Override + public void setScorer(Scorable scorer) { + this.scorer = scorer; + } + + /** Return the score of the current document. */ + public double get_score() { + try { + return scorer.score(); + } catch (IOException e) { + throw new ElasticsearchException("couldn't lookup score", e); + } + } + + /** + * The doc lookup for the Lucene segment this script was created for. + */ + public Map> getDoc() { + return leafLookup.doc(); + } + + /** + * Set the current document to run the script on next. + */ + public void setDocument(int docid) { + leafLookup.setDocument(docid); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/NumberSortScript.java b/server/src/main/java/org/elasticsearch/script/NumberSortScript.java new file mode 100644 index 00000000000..d0b3fdbed36 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/NumberSortScript.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.script; + +import java.io.IOException; +import java.util.Map; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.search.lookup.SearchLookup; + +public abstract class NumberSortScript extends AbstractSortScript { + + public static final String[] PARAMETERS = {}; + + public static final ScriptContext CONTEXT = new ScriptContext<>("number_sort", Factory.class); + + public NumberSortScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { + super(params, lookup, leafContext); + } + + protected NumberSortScript() { + super(); + } + + public abstract double execute(); + + /** + * A factory to construct {@link NumberSortScript} instances. + */ + public interface LeafFactory { + NumberSortScript newInstance(LeafReaderContext ctx) throws IOException; + + /** + * Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise. + */ + boolean needs_score(); + } + + /** + * A factory to construct stateful {@link NumberSortScript} factories for a specific index. + */ + public interface Factory { + LeafFactory newFactory(Map params, SearchLookup lookup); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index ddc090251a3..24dd491e3a1 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -43,7 +43,8 @@ public class ScriptModule { SearchScript.CONTEXT, AggregationScript.CONTEXT, ScoreScript.CONTEXT, - SearchScript.SCRIPT_SORT_CONTEXT, + NumberSortScript.CONTEXT, + StringSortScript.CONTEXT, TermsSetQueryScript.CONTEXT, UpdateScript.CONTEXT, BucketAggregationScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java index 496af2fbdd5..2fd439564a6 100644 --- a/server/src/main/java/org/elasticsearch/script/SearchScript.java +++ b/server/src/main/java/org/elasticsearch/script/SearchScript.java @@ -139,7 +139,4 @@ public abstract class SearchScript implements ScorerAware { /** The context used to compile {@link SearchScript} factories. */ public static final ScriptContext CONTEXT = new ScriptContext<>("search", Factory.class); - // TODO: remove these contexts when it has its own interface - // Can return a double. 
(For ScriptSortType#NUMBER only, for ScriptSortType#STRING normal CONTEXT should be used) - public static final ScriptContext SCRIPT_SORT_CONTEXT = new ScriptContext<>("sort", Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/script/StringSortScript.java b/server/src/main/java/org/elasticsearch/script/StringSortScript.java new file mode 100644 index 00000000000..1c6c47dd215 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/StringSortScript.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.script; + +import java.io.IOException; +import java.util.Map; +import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.search.lookup.SearchLookup; + +public abstract class StringSortScript extends AbstractSortScript { + + public static final String[] PARAMETERS = {}; + + public static final ScriptContext CONTEXT = new ScriptContext<>("string_sort", Factory.class); + + public StringSortScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { + super(params, lookup, leafContext); + } + + public abstract String execute(); + + /** + * A factory to construct {@link StringSortScript} instances. + */ + public interface LeafFactory { + StringSortScript newInstance(LeafReaderContext ctx) throws IOException; + } + + /** + * A factory to construct stateful {@link StringSortScript} factories for a specific index. 
+ */ + public interface Factory { + LeafFactory newFactory(Map params, SearchLookup lookup); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index e425755a55e..95478e08324 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -51,7 +50,8 @@ import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; +import org.elasticsearch.script.NumberSortScript; +import org.elasticsearch.script.StringSortScript; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -305,9 +305,6 @@ public class ScriptSortBuilder extends SortBuilder { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { - final SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.SCRIPT_SORT_CONTEXT); - final SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); - MultiValueMode valueMode = null; if (sortMode != null) { valueMode = MultiValueMode.fromString(sortMode.toString()); @@ -336,8 +333,10 @@ public class ScriptSortBuilder extends SortBuilder { final IndexFieldData.XFieldComparatorSource fieldComparatorSource; switch (type) { case STRING: + final StringSortScript.Factory factory = context.getScriptService().compile(script, StringSortScript.CONTEXT); + final StringSortScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) { - SearchScript leafScript; + StringSortScript leafScript; @Override protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException { leafScript = searchScript.newInstance(context); @@ -350,9 +349,7 @@ public class ScriptSortBuilder extends SortBuilder { } @Override public BytesRef binaryValue() { - final Object run = leafScript.run(); - CollectionUtils.ensureNoSelfReferences(run, "ScriptSortBuilder leaf script"); - spare.copyChars(run.toString()); + spare.copyChars(leafScript.execute()); return spare.get(); } }; @@ -365,11 +362,13 @@ public class ScriptSortBuilder extends SortBuilder { }; break; case NUMBER: + final NumberSortScript.Factory numberSortFactory = context.getScriptService().compile(script, NumberSortScript.CONTEXT); + final NumberSortScript.LeafFactory numberSortScript = numberSortFactory.newFactory(script.getParams(), context.lookup()); fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) { - SearchScript leafScript; + NumberSortScript leafScript; @Override protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException 
{ - leafScript = searchScript.newInstance(context); + leafScript = numberSortScript.newInstance(context); final NumericDoubleValues values = new NumericDoubleValues() { @Override public boolean advanceExact(int doc) throws IOException { @@ -378,7 +377,7 @@ public class ScriptSortBuilder extends SortBuilder { } @Override public double doubleValue() { - return leafScript.runAsDouble(); + return leafScript.execute(); } }; return FieldData.singleton(values); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 7823cd8849c..c75d8bb054f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -274,6 +274,7 @@ public class RolloverIT extends ESIntegTestCase { public void testRolloverWithDateMath() { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + assumeTrue("only works on the same day", now.plusMinutes(5).getDayOfYear() == now.getDayOfYear()); String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1"; String dateMathExp = ""; assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get()); diff --git a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java index 0ba83d649a7..0bb72a4050d 100644 --- a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java +++ b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java @@ -19,6 +19,7 @@ package org.elasticsearch.discovery; +import java.nio.file.Path; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -33,7 +34,8 @@ import org.elasticsearch.discovery.zen.ZenPing; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.NodeConfigurationSource; import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.NetworkDisruption.Bridge; @@ -52,9 +54,9 @@ import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.discovery.DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -62,7 +64,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { static final TimeValue DISRUPTION_HEALING_OVERHEAD = TimeValue.timeValueSeconds(40); // we use 30s as timeout in many places. 
- private ClusterDiscoveryConfiguration discoveryConfig; + private NodeConfigurationSource discoveryConfig; @Override protected Settings nodeSettings(int nodeOrdinal) { @@ -116,18 +118,14 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { } } - List startCluster(int numberOfNodes) throws ExecutionException, InterruptedException { + List startCluster(int numberOfNodes) { return startCluster(numberOfNodes, -1); } - List startCluster(int numberOfNodes, int minimumMasterNode) throws ExecutionException, InterruptedException { - return startCluster(numberOfNodes, minimumMasterNode, null); - } - - List startCluster(int numberOfNodes, int minimumMasterNode, @Nullable int[] unicastHostsOrdinals) throws - ExecutionException, InterruptedException { - configureCluster(numberOfNodes, unicastHostsOrdinals, minimumMasterNode); - List nodes = internalCluster().startNodes(numberOfNodes); + List startCluster(int numberOfNodes, int minimumMasterNode) { + configureCluster(numberOfNodes, minimumMasterNode); + InternalTestCluster internalCluster = internalCluster(); + List nodes = internalCluster.startNodes(numberOfNodes); ensureStableCluster(numberOfNodes); // TODO: this is a temporary solution so that nodes will not base their reaction to a partition based on previous successful results @@ -154,20 +152,11 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { return Arrays.asList(MockTransportService.TestPlugin.class); } - void configureCluster( - int numberOfNodes, - @Nullable int[] unicastHostsOrdinals, - int minimumMasterNode - ) throws ExecutionException, InterruptedException { - configureCluster(DEFAULT_SETTINGS, numberOfNodes, unicastHostsOrdinals, minimumMasterNode); + void configureCluster(int numberOfNodes, int minimumMasterNode) { + configureCluster(DEFAULT_SETTINGS, numberOfNodes, minimumMasterNode); } - void configureCluster( - Settings settings, - int numberOfNodes, - @Nullable int[] unicastHostsOrdinals, - int minimumMasterNode - ) throws ExecutionException, InterruptedException { + void configureCluster(Settings settings, int numberOfNodes, int minimumMasterNode) { if (minimumMasterNode < 0) { minimumMasterNode = numberOfNodes / 2 + 1; } @@ -177,14 +166,21 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { .put(settings) .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numberOfNodes) .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), minimumMasterNode) + .putList(DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "file") .build(); if (discoveryConfig == null) { - if (unicastHostsOrdinals == null) { - discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings); - } else { - discoveryConfig = new ClusterDiscoveryConfiguration.UnicastZen(numberOfNodes, nodeSettings, unicastHostsOrdinals); - } + discoveryConfig = new NodeConfigurationSource() { + @Override + public Settings nodeSettings(final int nodeOrdinal) { + return nodeSettings; + } + + @Override + public Path nodeConfigPath(final int nodeOrdinal) { + return null; + } + }; } } diff --git a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index 3b08eb6870e..b35bf8444e9 100644 --- a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -363,7 +363,7 @@ public class ClusterDisruptionIT extends 
AbstractDisruptionTestCase { */ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Exception { // don't use DEFAULT settings (which can cause node disconnects on a slow CI machine) - configureCluster(Settings.EMPTY, 3, null, 1); + configureCluster(Settings.EMPTY, 3, 1); internalCluster().startMasterOnlyNode(); final String node_1 = internalCluster().startDataOnlyNode(); @@ -390,7 +390,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase { public void testIndexImportedFromDataOnlyNodesIfMasterLostDataFolder() throws Exception { // test for https://github.com/elastic/elasticsearch/issues/8823 - configureCluster(2, null, 1); + configureCluster(2, 1); String masterNode = internalCluster().startMasterOnlyNode(Settings.EMPTY); internalCluster().startDataOnlyNode(Settings.EMPTY); @@ -421,7 +421,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase { .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed .build(); final String idxName = "test"; - configureCluster(settings, 3, null, 2); + configureCluster(settings, 3, 2); final List allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(2); final String dataNode = internalCluster().startDataOnlyNode(); ensureStableCluster(3); diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java index 4d190921f2d..2c7f17468ac 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java @@ -59,7 +59,8 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase { public void testIsolatedUnicastNodes() throws Exception { - List nodes = startCluster(4, -1, new int[]{0}); + internalCluster().setHostsListContainsOnlyFirstNode(true); + List nodes = startCluster(4, -1); // Figure out what is the elected master node final String unicastTarget = nodes.get(0); @@ -98,7 +99,8 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase { * The rejoining node should take this master node and connect. 
*/ public void testUnicastSinglePingResponseContainsMaster() throws Exception { - List nodes = startCluster(4, -1, new int[]{0}); + internalCluster().setHostsListContainsOnlyFirstNode(true); + List nodes = startCluster(4, -1); // Figure out what is the elected master node final String masterNode = internalCluster().getMasterName(); logger.info("---> legit elected master node={}", masterNode); @@ -194,7 +196,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase { } public void testClusterFormingWithASlowNode() throws Exception { - configureCluster(3, null, 2); + configureCluster(3, 2); SlowClusterStateProcessing disruption = new SlowClusterStateProcessing(random(), 0, 0, 1000, 2000); @@ -210,7 +212,7 @@ public class DiscoveryDisruptionIT extends AbstractDisruptionTestCase { } public void testElectMasterWithLatestVersion() throws Exception { - configureCluster(3, null, 2); + configureCluster(3, 2); final Set nodes = new HashSet<>(internalCluster().startNodes(3)); ensureStableCluster(3); ServiceDisruptionScheme isolateAllNodes = diff --git a/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java index d8390a6c688..474848a834e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/SnapshotDisruptionIT.java @@ -60,7 +60,7 @@ public class SnapshotDisruptionIT extends AbstractDisruptionTestCase { .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "30s") // wait till cluster state is committed .build(); final String idxName = "test"; - configureCluster(settings, 4, null, 2); + configureCluster(settings, 4, 2); final List allMasterEligibleNodes = internalCluster().startMasterOnlyNodes(3); final String dataNode = internalCluster().startDataOnlyNode(); ensureStableCluster(4); diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 9e89bf2b59d..64a9d97e3f4 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -105,6 +105,47 @@ public class MockScriptEngine implements ScriptEngine { } }; return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(NumberSortScript.class)) { + NumberSortScript.Factory factory = (parameters, lookup) -> new NumberSortScript.LeafFactory() { + @Override + public NumberSortScript newInstance(final LeafReaderContext ctx) { + return new NumberSortScript(parameters, lookup, ctx) { + @Override + public double execute() { + Map vars = new HashMap<>(parameters); + vars.put("params", parameters); + vars.put("doc", getDoc()); + return ((Number) script.apply(vars)).doubleValue(); + } + }; + } + + @Override + public boolean needs_score() { + return false; + } + }; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(StringSortScript.class)) { + StringSortScript.Factory factory = (parameters, lookup) -> (StringSortScript.LeafFactory) ctx + -> new StringSortScript(parameters, lookup, ctx) { + @Override + public String execute() { + Map vars = new HashMap<>(parameters); + vars.put("params", parameters); + vars.put("doc", getDoc()); + return String.valueOf(script.apply(vars)); + } + }; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(IngestScript.class)) 
{ + IngestScript.Factory factory = vars -> new IngestScript(vars) { + @Override + public void execute(Map ctx) { + script.apply(ctx); + } + }; + return context.factoryClazz.cast(factory); } else if(context.instanceClazz.equals(AggregationScript.class)) { AggregationScript.Factory factory = (parameters, lookup) -> new AggregationScript.LeafFactory() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 5d6a1ebe6ed..6d1b80e0364 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -232,6 +232,9 @@ public final class InternalTestCluster extends TestCluster { private ServiceDisruptionScheme activeDisruptionScheme; private Function clientWrapper; + // If set to true only the first node in the cluster will be made a unicast node + private boolean hostsListContainsOnlyFirstNode; + public InternalTestCluster( final long clusterSeed, final Path baseDir, @@ -1613,12 +1616,17 @@ public final class InternalTestCluster extends TestCluster { private final Object discoveryFileMutex = new Object(); - private void rebuildUnicastHostFiles(Collection newNodes) { + private void rebuildUnicastHostFiles(List newNodes) { // cannot be a synchronized method since it's called on other threads from within synchronized startAndPublishNodesAndClients() synchronized (discoveryFileMutex) { try { - List discoveryFileContents = Stream.concat(nodes.values().stream(), newNodes.stream()) - .map(nac -> nac.node.injector().getInstance(TransportService.class)).filter(Objects::nonNull) + Stream unicastHosts = Stream.concat(nodes.values().stream(), newNodes.stream()); + if (hostsListContainsOnlyFirstNode) { + unicastHosts = unicastHosts.limit(1L); + } + List discoveryFileContents = unicastHosts.map( + nac -> nac.node.injector().getInstance(TransportService.class) + ).filter(Objects::nonNull) .map(TransportService::getLocalNode).filter(Objects::nonNull).filter(DiscoveryNode::isMasterNode) .map(n -> n.getAddress().toString()) .distinct().collect(Collectors.toList()); @@ -2050,6 +2058,9 @@ public final class InternalTestCluster extends TestCluster { return filterNodes(nodes, NodeAndClient::isMasterEligible).size(); } + public void setHostsListContainsOnlyFirstNode(boolean hostsListContainsOnlyFirstNode) { + this.hostsListContainsOnlyFirstNode = hostsListContainsOnlyFirstNode; + } public void setDisruptionScheme(ServiceDisruptionScheme scheme) { assert activeDisruptionScheme == null : diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java deleted file mode 100644 index a63ba22bb51..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.discovery; - -import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.carrotsearch.randomizedtesting.SysGlobals; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.network.NetworkUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.mocksocket.MockServerSocket; -import org.elasticsearch.test.NodeConfigurationSource; -import org.elasticsearch.transport.TcpTransport; - -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.ServerSocket; -import java.nio.file.Path; -import java.util.HashSet; -import java.util.Set; - -public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { - - /** - * The number of ports in the range used for this JVM - */ - private static final int PORTS_PER_JVM = 100; - - private static final int JVM_ORDINAL = Integer.parseInt(System.getProperty(SysGlobals.CHILDVM_SYSPROP_JVM_ID, "0")); - - /** - * a per-JVM unique offset to be used for calculating unique port ranges. - */ - private static final int JVM_BASE_PORT_OFFSET = PORTS_PER_JVM * (JVM_ORDINAL + 1); - - - static Settings DEFAULT_NODE_SETTINGS = Settings.EMPTY; - private static final String IP_ADDR = "127.0.0.1"; - - final int numOfNodes; - final Settings nodeSettings; - final Settings transportClientSettings; - - public ClusterDiscoveryConfiguration(int numOfNodes, Settings extraSettings) { - this.numOfNodes = numOfNodes; - this.nodeSettings = Settings.builder().put(DEFAULT_NODE_SETTINGS).put(extraSettings).build(); - this.transportClientSettings = Settings.builder().put(extraSettings).build(); - } - - @Override - public Settings nodeSettings(int nodeOrdinal) { - return nodeSettings; - } - - @Override - public Path nodeConfigPath(int nodeOrdinal) { - return null; - } - - @Override - public Settings transportClientSettings() { - return transportClientSettings; - } - - public static class UnicastZen extends ClusterDiscoveryConfiguration { - - // this variable is incremented on each bind attempt and will maintain the next port that should be tried - private static int nextPort = calcBasePort(); - - private final int[] unicastHostOrdinals; - private final int[] unicastHostPorts; - - public UnicastZen(int numOfNodes, Settings extraSettings) { - this(numOfNodes, numOfNodes, extraSettings); - } - - public UnicastZen(int numOfNodes, int numOfUnicastHosts, Settings extraSettings) { - super(numOfNodes, extraSettings); - if (numOfUnicastHosts == numOfNodes) { - unicastHostOrdinals = new int[numOfNodes]; - for (int i = 0; i < numOfNodes; i++) { - unicastHostOrdinals[i] = i; - } - } else { - Set ordinals = new HashSet<>(numOfUnicastHosts); - while (ordinals.size() != numOfUnicastHosts) { - ordinals.add(RandomizedTest.randomInt(numOfNodes - 1)); - } - unicastHostOrdinals = CollectionUtils.toArray(ordinals); - } - this.unicastHostPorts = unicastHostPorts(numOfNodes); - assert unicastHostOrdinals.length 
<= unicastHostPorts.length; - } - - public UnicastZen(int numOfNodes, int[] unicastHostOrdinals) { - this(numOfNodes, Settings.EMPTY, unicastHostOrdinals); - } - - public UnicastZen(int numOfNodes, Settings extraSettings, int[] unicastHostOrdinals) { - super(numOfNodes, extraSettings); - this.unicastHostOrdinals = unicastHostOrdinals; - this.unicastHostPorts = unicastHostPorts(numOfNodes); - assert unicastHostOrdinals.length <= unicastHostPorts.length; - } - - private static int calcBasePort() { - return 30000 + JVM_BASE_PORT_OFFSET; - } - - @Override - public Settings nodeSettings(int nodeOrdinal) { - Settings.Builder builder = Settings.builder().put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), numOfNodes); - - String[] unicastHosts = new String[unicastHostOrdinals.length]; - if (nodeOrdinal >= unicastHostPorts.length) { - throw new ElasticsearchException("nodeOrdinal [" + nodeOrdinal + "] is greater than the number unicast ports [" - + unicastHostPorts.length + "]"); - } else { - // we need to pin the node port & host so we'd know where to point things - builder.put(TcpTransport.PORT.getKey(), unicastHostPorts[nodeOrdinal]); - builder.put(TcpTransport.HOST.getKey(), IP_ADDR); // only bind on one IF we use v4 here by default - for (int i = 0; i < unicastHostOrdinals.length; i++) { - unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]); - } - } - builder.putList("discovery.zen.ping.unicast.hosts", unicastHosts); - return builder.put(super.nodeSettings(nodeOrdinal)).build(); - } - - @SuppressForbidden(reason = "we know we pass a IP address") - protected static synchronized int[] unicastHostPorts(int numHosts) { - int[] unicastHostPorts = new int[numHosts]; - - final int basePort = calcBasePort(); - final int maxPort = basePort + PORTS_PER_JVM; - int tries = 0; - for (int i = 0; i < unicastHostPorts.length; i++) { - boolean foundPortInRange = false; - while (tries < PORTS_PER_JVM && !foundPortInRange) { - try (ServerSocket serverSocket = new MockServerSocket()) { - // Set SO_REUSEADDR as we may bind here and not be able to reuse the address immediately without it. - serverSocket.setReuseAddress(NetworkUtils.defaultReuseAddress()); - serverSocket.bind(new InetSocketAddress(IP_ADDR, nextPort)); - // bind was a success - foundPortInRange = true; - unicastHostPorts[i] = nextPort; - } catch (IOException e) { - // Do nothing - } - - nextPort++; - if (nextPort >= maxPort) { - // Roll back to the beginning of the range and do not go into another JVM's port range - nextPort = basePort; - } - tries++; - } - - if (!foundPortInRange) { - throw new ElasticsearchException("could not find enough open ports in range [" + basePort + "-" + maxPort - + "]. 
required [" + unicastHostPorts.length + "] ports"); - } - } - return unicastHostPorts; - } - } -} diff --git a/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java b/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java index b8d0c915899..2ff638ebca0 100644 --- a/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java +++ b/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java @@ -110,22 +110,12 @@ public class ChainIT extends ESRestTestCase { assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh"))); } - private static void resumeFollow(String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); - request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); - } - private static void followIndex(String leaderIndex, String followIndex) throws IOException { final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); request.setJsonEntity("{\"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } - private static void pauseFollow(String followIndex) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow"))); - } - private static void verifyDocuments(String index, int expectedNumDocs) throws IOException { verifyDocuments(index, expectedNumDocs, client()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java index a784922228b..d0de0e02038 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.rollup; import org.elasticsearch.common.ParseField; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; @@ -15,7 +16,9 @@ import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilde import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import java.util.Arrays; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -34,8 +37,19 @@ public class RollupField { public static final String TYPE_NAME = "_doc"; public static final String AGG = "agg"; public static final String ROLLUP_MISSING = "ROLLUP_MISSING_40710B25931745D4B0B8B310F6912A69"; - public static final List SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME, + public static final List SUPPORTED_NUMERIC_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME, SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME); + public static final List SUPPORTED_DATE_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, + MinAggregationBuilder.NAME, + ValueCountAggregationBuilder.NAME); + + // a set of ALL our supported metrics, to be a union of all other 
supported metric types (numeric, date, etc.) + public static final Set SUPPORTED_METRICS; + static { + SUPPORTED_METRICS = new HashSet<>(); + SUPPORTED_METRICS.addAll(SUPPORTED_NUMERIC_METRICS); + SUPPORTED_METRICS.addAll(SUPPORTED_DATE_METRICS); + } // these mapper types are used by the configs (metric, histo, etc) to validate field mappings public static final List NUMERIC_FIELD_MAPPER_TYPES; @@ -47,6 +61,8 @@ public class RollupField { NUMERIC_FIELD_MAPPER_TYPES = types; } + public static final String DATE_FIELD_MAPPER_TYPE = DateFieldMapper.CONTENT_TYPE; + /** * Format to the appropriate Rollup field name convention * diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java index 3a267e4cfa4..46f0c7397c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.rollup.RollupField; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -108,18 +109,24 @@ public class MetricConfig implements Writeable, ToXContentObject { Map fieldCaps = fieldCapsResponse.get(field); if (fieldCaps != null && fieldCaps.isEmpty() == false) { fieldCaps.forEach((key, value) -> { + if (value.isAggregatable() == false) { + validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " + + "but is not."); + } if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) { - if (value.isAggregatable() == false) { - validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " + - "but is not."); + // nothing to do as all metrics are supported by SUPPORTED_NUMERIC_METRICS currently + } else if (RollupField.DATE_FIELD_MAPPER_TYPE.equals(key)) { + if (RollupField.SUPPORTED_DATE_METRICS.containsAll(metrics) == false) { + validationException.addValidationError( + buildSupportedMetricError("date", RollupField.SUPPORTED_DATE_METRICS)); } } else { - validationException.addValidationError("The field referenced by a metric group must be a [numeric] type, but found " + - fieldCaps.keySet().toString() + " for field [" + field + "]"); + validationException.addValidationError("The field referenced by a metric group must be a [numeric] or [date] type, " + + "but found " + fieldCaps.keySet().toString() + " for field [" + field + "]"); } }); } else { - validationException.addValidationError("Could not find a [numeric] field with name [" + field + "] in any of the " + + validationException.addValidationError("Could not find a [numeric] or [date] field with name [" + field + "] in any of the " + "indices matching the index pattern."); } } @@ -166,4 +173,11 @@ public class MetricConfig implements Writeable, ToXContentObject { public static MetricConfig fromXContent(final XContentParser parser) throws IOException { return PARSER.parse(parser, null); } + + private String buildSupportedMetricError(String type, List supportedMetrics) { + List unsupportedMetrics = new ArrayList<>(metrics); + unsupportedMetrics.removeAll(supportedMetrics); + return "Only the metrics " + supportedMetrics + " are supported for [" + type + "] types," + + " but unsupported metrics " + unsupportedMetrics + " 
supplied for field [" + field + "]"; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java index c26b66875e6..a63c3b0dc8c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/APMSystemUser.java @@ -16,7 +16,6 @@ public class APMSystemUser extends User { public static final String NAME = UsernamesField.APM_NAME; public static final String ROLE_NAME = UsernamesField.APM_ROLE; public static final Version DEFINED_SINCE = Version.V_6_5_0; - public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE); public APMSystemUser(boolean enabled) { super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java index dfa437fa8d2..3daf242d520 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BeatsSystemUser.java @@ -16,7 +16,6 @@ public class BeatsSystemUser extends User { public static final String NAME = UsernamesField.BEATS_NAME; public static final String ROLE_NAME = UsernamesField.BEATS_ROLE; public static final Version DEFINED_SINCE = Version.V_6_3_0; - public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE); public BeatsSystemUser(boolean enabled) { super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BuiltinUserInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BuiltinUserInfo.java deleted file mode 100644 index 0ecd457ba09..00000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/BuiltinUserInfo.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.security.user; - -import org.elasticsearch.Version; - -/** - * BuiltinUserInfo provides common user meta data for newly introduced pre defined System Users. - */ -public class BuiltinUserInfo { - private final String name; - private final String role; - private final Version definedSince; - - public BuiltinUserInfo(String name, String role, Version definedSince) { - this.name = name; - this.role = role; - this.definedSince = definedSince; - } - - /** Get the builtin users name. */ - public String getName() { - return name; - } - - /** Get the builtin users default role name. */ - public String getRole() { - return role; - } - - /** Get version the builtin user was introduced with. 
*/ - public Version getDefinedSince() { - return definedSince; - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java index a5b8d9afead..bc2c9c3157b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/MetricConfigSerializingTests.java @@ -11,14 +11,17 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.elasticsearch.xpack.core.rollup.RollupField; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.isIn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -45,8 +48,8 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase> responseMap = new HashMap<>(); + + // Have to mock fieldcaps because the ctor's aren't public... + FieldCapabilities fieldCaps = mock(FieldCapabilities.class); + when(fieldCaps.isAggregatable()).thenReturn(true); + responseMap.put("my_field", Collections.singletonMap("date", fieldCaps)); + + MetricConfig config = new MetricConfig("my_field", Arrays.asList("avg", "max")); + config.validateMappings(responseMap, e); + assertThat(e.validationErrors().get(0), equalTo("Only the metrics " + RollupField.SUPPORTED_DATE_METRICS.toString() + + " are supported for [date] types, but unsupported metrics [avg] supplied for field [my_field]")); + } + public void testValidateMatchingField() { ActionRequestValidationException e = new ActionRequestValidationException(); Map> responseMap = new HashMap<>(); @@ -153,6 +171,13 @@ public class MetricConfigSerializingTests extends AbstractSerializingTestCase new ParameterizedMessage("Removing old tokens: [{}]", Strings.toString(expiredDbq))); executeAsyncWithOrigin(client, SECURITY_ORIGIN, DeleteByQueryAction.INSTANCE, expiredDbq, ActionListener.wrap(r -> { debugDbqResponse(r); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 4f1ec4ad8c0..6ac7f687e24 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -8,11 +8,8 @@ package org.elasticsearch.xpack.security.authc; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.Priority; -import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.UnicodeUtil; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -36,9 +33,12 @@ import 
org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.ack.AckedRequest; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.cache.Cache; @@ -60,15 +60,16 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.security.ScrollHelper; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; @@ -114,6 +115,7 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Consumer; import java.util.function.Supplier; import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; @@ -159,6 +161,7 @@ public final class TokenService extends AbstractComponent { static final String INVALIDATED_TOKEN_DOC_TYPE = "invalidated-token"; static final int MINIMUM_BYTES = VERSION_BYTES + SALT_BYTES + IV_BYTES + 1; private static final int MINIMUM_BASE64_BYTES = Double.valueOf(Math.ceil((4 * MINIMUM_BYTES) / 3)).intValue(); + private static final int MAX_RETRY_ATTEMPTS = 5; private final SecureRandom secureRandom = new SecureRandom(); private final ClusterService clusterService; @@ -217,9 +220,10 @@ public final class TokenService extends AbstractComponent { boolean includeRefreshToken) throws IOException { ensureEnabled(); if (authentication == null) { - listener.onFailure(new IllegalArgumentException("authentication must be provided")); + listener.onFailure(traceLog("create token", null, new IllegalArgumentException("authentication must be provided"))); } else if (originatingClientAuth == null) { - listener.onFailure(new IllegalArgumentException("originating client authentication must be provided")); + listener.onFailure(traceLog("create token", null, + new IllegalArgumentException("originating client authentication must be provided"))); } else { final Instant created = clock.instant(); final Instant expiration = getExpirationTime(created); @@ -252,16 +256,17 @@ public final class TokenService extends AbstractComponent { .field("realm", authentication.getAuthenticatedBy().getName()) .endObject(); builder.endObject(); + final String documentId = 
getTokenDocumentId(userToken); IndexRequest request = - client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) + client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, documentId) .setOpType(OpType.CREATE) .setSource(builder) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .request(); - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> - executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request, - ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)), - listener::onFailure)) + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", documentId, ex)), + () -> executeAsyncWithOrigin(client, SECURITY_ORIGIN, IndexAction.INSTANCE, request, + ActionListener.wrap(indexResponse -> listener.onResponse(new Tuple<>(userToken, refreshToken)), + listener::onFailure)) ); } } @@ -323,7 +328,7 @@ public final class TokenService extends AbstractComponent { Instant currentTime = clock.instant(); if (currentTime.isAfter(userToken.getExpirationTime())) { // token expired - listener.onFailure(expiredTokenException()); + listener.onFailure(traceLog("decode token", token, expiredTokenException())); } else { checkIfTokenIsRevoked(userToken, listener); } @@ -361,43 +366,50 @@ public final class TokenService extends AbstractComponent { final Cipher cipher = getDecryptionCipher(iv, decodeKey, version, decodedSalt); if (version.onOrAfter(Version.V_6_2_0)) { // we only have the id and need to get the token from the doc! - decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - final GetRequest getRequest = - client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, + decryptTokenId(in, cipher, version, ActionListener.wrap(tokenId -> { + if (securityIndex.isAvailable() == false) { + logger.warn("failed to get token [{}] since index is not available", tokenId); + listener.onResponse(null); + } else { + securityIndex.checkIndexVersionThenExecute( + ex -> listener.onFailure(traceLog("prepare security index", tokenId, ex)), + () -> { + final GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(tokenId)).request(); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, - ActionListener.wrap(response -> { - if (response.isExists()) { - Map accessTokenSource = + Consumer onFailure = ex -> listener.onFailure(traceLog("decode token", tokenId, ex)); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, + ActionListener.wrap(response -> { + if (response.isExists()) { + Map accessTokenSource = (Map) response.getSource().get("access_token"); - if (accessTokenSource == null) { - listener.onFailure(new IllegalStateException("token document is missing " + - "the access_token field")); - } else if (accessTokenSource.containsKey("user_token") == false) { - listener.onFailure(new IllegalStateException("token document is missing " + - "the user_token field")); - } else { - Map userTokenSource = + if (accessTokenSource == null) { + onFailure.accept(new IllegalStateException( + "token document is missing the access_token field")); + } else if (accessTokenSource.containsKey("user_token") == false) { + onFailure.accept(new IllegalStateException( + "token document is missing the user_token field")); + } else { + Map userTokenSource 
= (Map) accessTokenSource.get("user_token"); - listener.onResponse(UserToken.fromSourceMap(userTokenSource)); - } - } else { - listener.onFailure( + listener.onResponse(UserToken.fromSourceMap(userTokenSource)); + } + } else { + onFailure.accept( new IllegalStateException("token document is missing and must be present")); - } - }, e -> { - // if the index or the shard is not there / available we assume that - // the token is not valid - if (isShardNotAvailableException(e)) { - logger.warn("failed to get token [{}] since index is not available", tokenId); - listener.onResponse(null); - } else { - logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e); - listener.onFailure(e); - } - }), client::get); - }), listener::onFailure)); + } + }, e -> { + // if the index or the shard is not there / available we assume that + // the token is not valid + if (isShardNotAvailableException(e)) { + logger.warn("failed to get token [{}] since index is not available", tokenId); + listener.onResponse(null); + } else { + logger.error(new ParameterizedMessage("failed to get token [{}]", tokenId), e); + listener.onFailure(e); + } + }), client::get); + }); + }}, listener::onFailure)); } else { decryptToken(in, cipher, version, listener); } @@ -461,13 +473,14 @@ public final class TokenService extends AbstractComponent { public void invalidateAccessToken(String tokenString, ActionListener listener) { ensureEnabled(); if (Strings.isNullOrEmpty(tokenString)) { + logger.trace("No token-string provided"); listener.onFailure(new IllegalArgumentException("token must be provided")); } else { maybeStartTokenRemover(); try { decodeToken(tokenString, ActionListener.wrap(userToken -> { if (userToken == null) { - listener.onFailure(malformedTokenException()); + listener.onFailure(traceLog("invalidate token", tokenString, malformedTokenException())); } else { final long expirationEpochMilli = getExpirationTime().toEpochMilli(); indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli); @@ -488,6 +501,7 @@ public final class TokenService extends AbstractComponent { public void invalidateAccessToken(UserToken userToken, ActionListener listener) { ensureEnabled(); if (userToken == null) { + logger.trace("No access token provided"); listener.onFailure(new IllegalArgumentException("token must be provided")); } else { maybeStartTokenRemover(); @@ -499,6 +513,7 @@ public final class TokenService extends AbstractComponent { public void invalidateRefreshToken(String refreshToken, ActionListener listener) { ensureEnabled(); if (Strings.isNullOrEmpty(refreshToken)) { + logger.trace("No refresh token provided"); listener.onFailure(new IllegalArgumentException("refresh token must be provided")); } else { maybeStartTokenRemover(); @@ -521,7 +536,8 @@ public final class TokenService extends AbstractComponent { */ private void indexBwcInvalidation(UserToken userToken, ActionListener listener, AtomicInteger attemptCount, long expirationEpochMilli) { - if (attemptCount.get() > 5) { + if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to invalidate token [{}] after [{}] attempts", userToken.getId(), attemptCount.get()); listener.onFailure(invalidGrantException("failed to invalidate token")); } else { final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken); @@ -532,14 +548,15 @@ public final class TokenService extends AbstractComponent { .request(); final String tokenDocId = getTokenDocumentId(userToken); final Version version = userToken.getVersion(); - 
securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest, + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)), + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest, ActionListener.wrap(indexResponse -> { ActionListener wrappedListener = ActionListener.wrap(ignore -> listener.onResponse(true), listener::onFailure); indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L); }, e -> { Throwable cause = ExceptionsHelper.unwrapCause(e); + traceLog("(bwc) invalidate token", tokenDocId, cause); if (cause instanceof VersionConflictEngineException) { // expected since something else could have invalidated ActionListener wrappedListener = @@ -566,7 +583,8 @@ public final class TokenService extends AbstractComponent { */ private void indexInvalidation(String tokenDocId, Version version, ActionListener listener, AtomicInteger attemptCount, String srcPrefix, long documentVersion) { - if (attemptCount.get() > 5) { + if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to invalidate token [{}] after [{}] attempts", tokenDocId, attemptCount.get()); listener.onFailure(invalidGrantException("failed to invalidate token")); } else { UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) @@ -574,75 +592,79 @@ public final class TokenService extends AbstractComponent { .setVersion(documentVersion) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) .request(); - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, - ActionListener.wrap(updateResponse -> { - if (updateResponse.getGetResult() != null - && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix) - && ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) - .containsKey("invalidated")) { - final boolean prevInvalidated = (boolean) - ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) - .get("invalidated"); - listener.onResponse(prevInvalidated == false); - } else { - listener.onResponse(true); - } - }, e -> { - Throwable cause = ExceptionsHelper.unwrapCause(e); - if (cause instanceof DocumentMissingException) { - if (version.onOrAfter(Version.V_6_2_0)) { - // the document should always be there! 
- listener.onFailure(e); - } else { - listener.onResponse(false); - } - } else if (cause instanceof VersionConflictEngineException - || isShardNotAvailableException(cause)) { - attemptCount.incrementAndGet(); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), - ActionListener.wrap(getResult -> { - if (getResult.isExists()) { - Map source = getResult.getSource(); - Map accessTokenSource = - (Map) source.get("access_token"); - if (accessTokenSource == null) { - listener.onFailure(new IllegalArgumentException("token document is " + - "missing access_token field")); - } else { - Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); - if (invalidated == null) { - listener.onFailure(new IllegalStateException( - "token document missing invalidated value")); - } else if (invalidated) { - listener.onResponse(false); - } else { - indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, - getResult.getVersion()); - } - } - } else if (version.onOrAfter(Version.V_6_2_0)) { - logger.warn("could not find token document [{}] but there should " + - "be one as token has version [{}]", tokenDocId, version); - listener.onFailure(invalidGrantException("could not invalidate the token")); - } else { - listener.onResponse(false); - } - }, - e1 -> { - if (isShardNotAvailableException(e1)) { - // don't increment count; call again - indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, - documentVersion); - } else { - listener.onFailure(e1); - } - }), client::get); - } else { + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)), + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + ActionListener.wrap(updateResponse -> { + logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, tokenDocId); + if (updateResponse.getGetResult() != null + && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix) + && ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) + .containsKey("invalidated")) { + final boolean prevInvalidated = (boolean) + ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) + .get("invalidated"); + listener.onResponse(prevInvalidated == false); + } else { + listener.onResponse(true); + } + }, e -> { + Throwable cause = ExceptionsHelper.unwrapCause(e); + traceLog("invalidate token", tokenDocId, cause); + if (cause instanceof DocumentMissingException) { + if (version.onOrAfter(Version.V_6_2_0)) { + // the document should always be there! 
listener.onFailure(e); + } else { + listener.onResponse(false); } - }), client::update)); + } else if (cause instanceof VersionConflictEngineException + || isShardNotAvailableException(cause)) { + attemptCount.incrementAndGet(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), + ActionListener.wrap(getResult -> { + if (getResult.isExists()) { + Map source = getResult.getSource(); + Map accessTokenSource = (Map) source.get("access_token"); + Consumer onFailure = ex -> listener.onFailure(traceLog("get token", tokenDocId, ex)); + if (accessTokenSource == null) { + onFailure.accept(new IllegalArgumentException( + "token document is missing access_token field")); + } else { + Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); + if (invalidated == null) { + onFailure.accept(new IllegalStateException( + "token document missing invalidated value")); + } else if (invalidated) { + logger.trace("Token [{}] is already invalidated", tokenDocId); + listener.onResponse(false); + } else { + indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, + getResult.getVersion()); + } + } + } else if (version.onOrAfter(Version.V_6_2_0)) { + logger.warn("could not find token document [{}] but there should " + + "be one as token has version [{}]", tokenDocId, version); + listener.onFailure(invalidGrantException("could not invalidate the token")); + } else { + listener.onResponse(false); + } + }, + e1 -> { + traceLog("get token", tokenDocId, e1); + if (isShardNotAvailableException(e1)) { + // don't increment count; call again + indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, + documentVersion); + } else { + listener.onFailure(e1); + } + }), client::get); + } else { + listener.onFailure(e); + } + }), client::update)); } } @@ -663,7 +685,8 @@ public final class TokenService extends AbstractComponent { private void findTokenFromRefreshToken(String refreshToken, ActionListener> listener, AtomicInteger attemptCount) { - if (attemptCount.get() > 5) { + if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to find token for refresh token [{}] after [{}] attempts", refreshToken, attemptCount.get()); listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) @@ -673,30 +696,37 @@ public final class TokenService extends AbstractComponent { .setVersion(true) .request(); - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> + if (securityIndex.isAvailable() == false) { + logger.debug("security index is not available to find token from refresh token, retrying"); + attemptCount.incrementAndGet(); + findTokenFromRefreshToken(refreshToken, listener, attemptCount); + } else { + Consumer onFailure = ex -> listener.onFailure(traceLog("find by refresh token", refreshToken, ex)); + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, - ActionListener.wrap(searchResponse -> { - if (searchResponse.isTimedOut()) { - attemptCount.incrementAndGet(); - findTokenFromRefreshToken(refreshToken, listener, attemptCount); - } else if (searchResponse.getHits().getHits().length < 1) { - logger.info("could not find token document with refresh_token [{}]", refreshToken); - 
listener.onFailure(invalidGrantException("could not refresh the requested token")); - } else if (searchResponse.getHits().getHits().length > 1) { - listener.onFailure(new IllegalStateException("multiple tokens share the same refresh token")); - } else { - listener.onResponse(new Tuple<>(searchResponse, attemptCount)); - } - }, e -> { - if (isShardNotAvailableException(e)) { - logger.debug("failed to search for token document, retrying", e); - attemptCount.incrementAndGet(); - findTokenFromRefreshToken(refreshToken, listener, attemptCount); - } else { - listener.onFailure(e); - } - }), - client::search)); + ActionListener.wrap(searchResponse -> { + if (searchResponse.isTimedOut()) { + attemptCount.incrementAndGet(); + findTokenFromRefreshToken(refreshToken, listener, attemptCount); + } else if (searchResponse.getHits().getHits().length < 1) { + logger.info("could not find token document with refresh_token [{}]", refreshToken); + onFailure.accept(invalidGrantException("could not refresh the requested token")); + } else if (searchResponse.getHits().getHits().length > 1) { + onFailure.accept(new IllegalStateException("multiple tokens share the same refresh token")); + } else { + listener.onResponse(new Tuple<>(searchResponse, attemptCount)); + } + }, e -> { + if (isShardNotAvailableException(e)) { + logger.debug("failed to search for token document, retrying", e); + attemptCount.incrementAndGet(); + findTokenFromRefreshToken(refreshToken, listener, attemptCount); + } else { + onFailure.accept(e); + } + }), + client::search)); + } } } @@ -707,62 +737,64 @@ public final class TokenService extends AbstractComponent { */ private void innerRefresh(String tokenDocId, Authentication userAuth, ActionListener> listener, AtomicInteger attemptCount) { - if (attemptCount.getAndIncrement() > 5) { + if (attemptCount.getAndIncrement() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to refresh token for doc [{}] after [{}] attempts", tokenDocId, attemptCount.get()); listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { + Consumer onFailure = ex -> listener.onFailure(traceLog("refresh token", tokenDocId, ex)); GetRequest getRequest = client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, getRequest, - ActionListener.wrap(response -> { - if (response.isExists()) { - final Map source = response.getSource(); - final Optional invalidSource = checkTokenDocForRefresh(source, userAuth); + ActionListener.wrap(response -> { + if (response.isExists()) { + final Map source = response.getSource(); + final Optional invalidSource = checkTokenDocForRefresh(source, userAuth); - if (invalidSource.isPresent()) { - listener.onFailure(invalidSource.get()); - } else { - final Map userTokenSource = (Map) - ((Map) source.get("access_token")).get("user_token"); - final String authString = (String) userTokenSource.get("authentication"); - final Integer version = (Integer) userTokenSource.get("version"); - final Map metadata = (Map) userTokenSource.get("metadata"); + if (invalidSource.isPresent()) { + onFailure.accept(invalidSource.get()); + } else { + final Map userTokenSource = (Map) + ((Map) source.get("access_token")).get("user_token"); + final String authString = (String) userTokenSource.get("authentication"); + final Integer version = (Integer) userTokenSource.get("version"); + final Map metadata = (Map) userTokenSource.get("metadata"); - Version authVersion = 
Version.fromId(version); - try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(authString))) { - in.setVersion(authVersion); - Authentication authentication = new Authentication(in); - UpdateRequest updateRequest = - client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) - .setVersion(response.getVersion()) - .setDoc("refresh_token", Collections.singletonMap("refreshed", true)) - .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) - .request(); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest, - ActionListener.wrap( - updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true), - e -> { - Throwable cause = ExceptionsHelper.unwrapCause(e); - if (cause instanceof VersionConflictEngineException || - isShardNotAvailableException(e)) { - innerRefresh(tokenDocId, userAuth, - listener, attemptCount); - } else { - listener.onFailure(e); - } - }), - client::update); - } + Version authVersion = Version.fromId(version); + try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(authString))) { + in.setVersion(authVersion); + Authentication authentication = new Authentication(in); + UpdateRequest updateRequest = + client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) + .setVersion(response.getVersion()) + .setDoc("refresh_token", Collections.singletonMap("refreshed", true)) + .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) + .request(); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest, + ActionListener.wrap( + updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true), + e -> { + Throwable cause = ExceptionsHelper.unwrapCause(e); + if (cause instanceof VersionConflictEngineException || + isShardNotAvailableException(e)) { + innerRefresh(tokenDocId, userAuth, + listener, attemptCount); + } else { + onFailure.accept(e); + } + }), + client::update); } - } else { - logger.info("could not find token document [{}] for refresh", tokenDocId); - listener.onFailure(invalidGrantException("could not refresh the requested token")); } - }, e -> { - if (isShardNotAvailableException(e)) { - innerRefresh(tokenDocId, userAuth, listener, attemptCount); - } else { - listener.onFailure(e); - } - }), client::get); + } else { + logger.info("could not find token document [{}] for refresh", tokenDocId); + onFailure.accept(invalidGrantException("could not refresh the requested token")); + } + }, e -> { + if (isShardNotAvailableException(e)) { + innerRefresh(tokenDocId, userAuth, listener, attemptCount); + } else { + listener.onFailure(e); + } + }), client::get); } } @@ -831,22 +863,22 @@ public final class TokenService extends AbstractComponent { if (Strings.isNullOrEmpty(realmName)) { listener.onFailure(new IllegalArgumentException("Realm name is required")); - return; - } - - final Instant now = clock.instant(); - final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + } else if (securityIndex.isAvailable() == false) { + listener.onResponse(Collections.emptyList()); + } else { + final Instant now = clock.instant(); + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery("doc_type", "token")) .filter(QueryBuilders.termQuery("access_token.realm", realmName)) .filter(QueryBuilders.boolQuery() - .should(QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("access_token.invalidated", false)) - 
.must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli())) - ) - .should(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("access_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli())) + ) + .should(QueryBuilders.termQuery("refresh_token.invalidated", false)) ); - final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) + final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(boolQuery) .setVersion(false) @@ -854,9 +886,10 @@ public final class TokenService extends AbstractComponent { .setFetchSource(true) .request(); - final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> - ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit)); + final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> + ScrollHelper.fetchAllByEntity(client, request, new ContextPreservingActionListener<>(supplier, listener), this::parseHit)); + } } private Tuple parseHit(SearchHit hit) { @@ -923,14 +956,17 @@ public final class TokenService extends AbstractComponent { */ private void checkIfTokenIsRevoked(UserToken userToken, ActionListener listener) { if (securityIndex.indexExists() == false) { - // index doesn't exist so the token is considered valid. + // index doesn't exist so the token is considered valid. it is important to note that + // we do not use isAvailable as the lack of a shard being available is not equivalent + // to the index not existing in the case of revocation checking. 
listener.onResponse(userToken); } else { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { MultiGetRequest mGetRequest = client.prepareMultiGet() .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getInvalidatedTokenDocumentId(userToken)) .add(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(userToken)) .request(); + Consumer onFailure = ex -> listener.onFailure(traceLog("check token state", userToken.getId(), ex)); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, mGetRequest, new ActionListener() { @@ -941,26 +977,26 @@ public final class TokenService extends AbstractComponent { if (itemResponse[0].isFailed()) { onFailure(itemResponse[0].getFailure().getFailure()); } else if (itemResponse[0].getResponse().isExists()) { - listener.onFailure(expiredTokenException()); + onFailure.accept(expiredTokenException()); } else if (itemResponse[1].isFailed()) { onFailure(itemResponse[1].getFailure().getFailure()); } else if (itemResponse[1].getResponse().isExists()) { Map source = itemResponse[1].getResponse().getSource(); Map accessTokenSource = (Map) source.get("access_token"); if (accessTokenSource == null) { - listener.onFailure(new IllegalStateException("token document is missing access_token field")); + onFailure.accept(new IllegalStateException("token document is missing access_token field")); } else { Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); if (invalidated == null) { - listener.onFailure(new IllegalStateException("token document is missing invalidated field")); + onFailure.accept(new IllegalStateException("token document is missing invalidated field")); } else if (invalidated) { - listener.onFailure(expiredTokenException()); + onFailure.accept(expiredTokenException()); } else { listener.onResponse(userToken); } } } else if (userToken.getVersion().onOrAfter(Version.V_6_2_0)) { - listener.onFailure(new IllegalStateException("token document is missing and must be present")); + onFailure.accept(new IllegalStateException("token document is missing and must be present")); } else { listener.onResponse(userToken); } @@ -1122,11 +1158,31 @@ public final class TokenService extends AbstractComponent { */ private static ElasticsearchSecurityException invalidGrantException(String detail) { ElasticsearchSecurityException e = - new ElasticsearchSecurityException("invalid_grant", RestStatus.BAD_REQUEST); + new ElasticsearchSecurityException("invalid_grant", RestStatus.BAD_REQUEST); e.addHeader("error_description", detail); return e; } + /** + * Logs an exception at TRACE level (if enabled) + */ + private E traceLog(String action, String identifier, E exception) { + if (logger.isTraceEnabled()) { + if (exception instanceof ElasticsearchException) { + final ElasticsearchException esEx = (ElasticsearchException) exception; + final Object detail = esEx.getHeader("error_description"); + if (detail != null) { + logger.trace("Failure in [{}] for id [{}] - [{}] [{}]", action, identifier, detail, esEx.getDetailedMessage()); + } else { + logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, esEx.getDetailedMessage()); + } + } else { + logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, exception.toString()); + } + } + return exception; + } + boolean isExpiredTokenException(ElasticsearchSecurityException e) { final List headers = e.getHeader("WWW-Authenticate"); return headers != null && 
headers.stream().anyMatch(EXPIRED_TOKEN_WWW_AUTH_VALUE::equals); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 620c3817ebb..196a48416a4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -118,8 +118,7 @@ public class NativeUsersStore extends AbstractComponent { } }; - if (securityIndex.indexExists() == false) { - // TODO remove this short circuiting and fix tests that fail without this! + if (securityIndex.isAvailable() == false) { listener.onResponse(Collections.emptyList()); } else if (userNames.length == 1) { // optimization for single user lookup final String username = userNames[0]; @@ -127,7 +126,7 @@ public class NativeUsersStore extends AbstractComponent { (uap) -> listener.onResponse(uap == null ? Collections.emptyList() : Collections.singletonList(uap.user())), handleException)); } else { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { final QueryBuilder query; if (userNames == null || userNames.length == 0) { query = QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE); @@ -155,10 +154,10 @@ public class NativeUsersStore extends AbstractComponent { } void getUserCount(final ActionListener listener) { - if (securityIndex.indexExists() == false) { + if (securityIndex.isAvailable() == false) { listener.onResponse(0L); } else { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareSearch(SECURITY_INDEX_NAME) .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), USER_DOC_TYPE)) @@ -182,11 +181,10 @@ public class NativeUsersStore extends AbstractComponent { * Async method to retrieve a user and their password */ private void getUserAndPassword(final String user, final ActionListener listener) { - if (securityIndex.indexExists() == false) { - // TODO remove this short circuiting and fix tests that fail without this! 
+ if (securityIndex.isAvailable() == false) { listener.onResponse(null); } else { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareGet(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, user)).request(), @@ -459,16 +457,19 @@ public class NativeUsersStore extends AbstractComponent { } public void deleteUser(final DeleteUserRequest deleteUserRequest, final ActionListener listener) { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME, + if (securityIndex.isAvailable() == false) { + listener.onResponse(false); + } else { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { + DeleteRequest request = client.prepareDelete(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(USER_DOC_TYPE, deleteUserRequest.username())).request(); - request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy()); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + request.setRefreshPolicy(deleteUserRequest.getRefreshPolicy()); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { clearRealmCache(deleteUserRequest.username(), listener, - deleteResponse.getResult() == DocWriteResponse.Result.DELETED); + deleteResponse.getResult() == DocWriteResponse.Result.DELETED); } @Override @@ -476,7 +477,8 @@ public class NativeUsersStore extends AbstractComponent { listener.onFailure(e); } }, client::delete); - }); + }); + } } /** @@ -498,11 +500,10 @@ public class NativeUsersStore extends AbstractComponent { } void getReservedUserInfo(String username, ActionListener listener) { - if (securityIndex.indexExists() == false) { - // TODO remove this short circuiting and fix tests that fail without this! 
+ if (securityIndex.isAvailable() == false) { listener.onResponse(null); } else { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareGet(SECURITY_INDEX_NAME, INDEX_TYPE, getIdForUser(RESERVED_USER_TYPE, username)).request(), @@ -541,49 +542,53 @@ public class NativeUsersStore extends AbstractComponent { } void getAllReservedUserInfo(ActionListener> listener) { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareSearch(SECURITY_INDEX_NAME) + if (securityIndex.isAvailable() == false) { + listener.onResponse(Collections.emptyMap()); + } else { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareSearch(SECURITY_INDEX_NAME) .setQuery(QueryBuilders.termQuery(Fields.TYPE.getPreferredName(), RESERVED_USER_TYPE)) .setFetchSource(true).request(), - new ActionListener() { - @Override - public void onResponse(SearchResponse searchResponse) { - Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits() <= 10 : + new ActionListener() { + @Override + public void onResponse(SearchResponse searchResponse) { + Map userInfos = new HashMap<>(); + assert searchResponse.getHits().getTotalHits() <= 10 : "there are more than 10 reserved users we need to change this to retrieve them all!"; - for (SearchHit searchHit : searchResponse.getHits().getHits()) { - Map sourceMap = searchHit.getSourceAsMap(); - String password = (String) sourceMap.get(Fields.PASSWORD.getPreferredName()); - Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName()); - final String id = searchHit.getId(); - assert id != null && id.startsWith(RESERVED_USER_TYPE) : + for (SearchHit searchHit : searchResponse.getHits().getHits()) { + Map sourceMap = searchHit.getSourceAsMap(); + String password = (String) sourceMap.get(Fields.PASSWORD.getPreferredName()); + Boolean enabled = (Boolean) sourceMap.get(Fields.ENABLED.getPreferredName()); + final String id = searchHit.getId(); + assert id != null && id.startsWith(RESERVED_USER_TYPE) : "id [" + id + "] does not start with reserved-user prefix"; - final String username = id.substring(RESERVED_USER_TYPE.length() + 1); - if (password == null) { - listener.onFailure(new IllegalStateException("password hash must not be null!")); - return; - } else if (enabled == null) { - listener.onFailure(new IllegalStateException("enabled must not be null!")); - return; + final String username = id.substring(RESERVED_USER_TYPE.length() + 1); + if (password == null) { + listener.onFailure(new IllegalStateException("password hash must not be null!")); + return; + } else if (enabled == null) { + listener.onFailure(new IllegalStateException("enabled must not be null!")); + return; + } else { + userInfos.put(username, new ReservedUserInfo(password.toCharArray(), enabled, false)); + } + } + listener.onResponse(userInfos); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof IndexNotFoundException) { + logger.trace("could not retrieve built in users since security index does not exist", e); + listener.onResponse(Collections.emptyMap()); } else { - userInfos.put(username, new ReservedUserInfo(password.toCharArray(), enabled, false)); + 
logger.error("failed to retrieve built in users", e); + listener.onFailure(e); } } - listener.onResponse(userInfos); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof IndexNotFoundException) { - logger.trace("could not retrieve built in users since security index does not exist", e); - listener.onResponse(Collections.emptyMap()); - } else { - logger.error("failed to retrieve built in users", e); - listener.onFailure(e); - } - } - }, client::search)); + }, client::search)); + } } private void clearRealmCache(String username, ActionListener listener, Response response) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index b45de8184d6..ba3e5ee3793 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -220,32 +220,32 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol }); } - private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener listener) throws IOException { - if (securityIndex.isIndexUpToDate() == false) { - listener.onFailure(new IllegalStateException( - "Security index is not on the current version - the native realm will not be operational until " + - "the upgrade API is run on the security index")); - return; - } - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareDelete(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(request.getName())) + private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener listener) { + if (securityIndex.isAvailable() == false) { + listener.onResponse(false); + } else { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareDelete(SECURITY_INDEX_NAME, SECURITY_GENERIC_TYPE, getIdForName(request.getName())) .setRefreshPolicy(request.getRefreshPolicy()) .request(), - new ActionListener() { + new ActionListener() { - @Override - public void onResponse(DeleteResponse deleteResponse) { - boolean deleted = deleteResponse.getResult() == DELETED; - listener.onResponse(deleted); - } + @Override + public void onResponse(DeleteResponse deleteResponse) { + boolean deleted = deleteResponse.getResult() == DELETED; + listener.onResponse(deleted); + } - @Override - public void onFailure(Exception e) { - logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e); - listener.onFailure(e); + @Override + public void onFailure(Exception e) { + logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e); + listener.onFailure(e); - } - }, client::delete); + } + }, client::delete); + }); + } } /** @@ -301,7 +301,7 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol * */ public void usageStats(ActionListener> listener) { - if (securityIndex.indexExists() == false) { + if (securityIndex.isAvailable() == false) { reportStats(listener, Collections.emptyList()); } else { getMappings(ActionListener.wrap(mappings -> reportStats(listener, mappings), listener::onFailure)); diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 2cfa89b647c..e7a27855f5d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -88,13 +88,15 @@ public class NativePrivilegeStore extends AbstractComponent { public void getPrivileges(Collection applications, Collection names, ActionListener> listener) { - if (applications != null && applications.size() == 1 && names != null && names.size() == 1) { + if (securityIndexManager.isAvailable() == false) { + listener.onResponse(Collections.emptyList()); + } else if (applications != null && applications.size() == 1 && names != null && names.size() == 1) { getPrivilege(Objects.requireNonNull(Iterables.get(applications, 0)), Objects.requireNonNull(Iterables.get(names, 0)), ActionListener.wrap(privilege -> listener.onResponse(privilege == null ? Collections.emptyList() : Collections.singletonList(privilege)), listener::onFailure)); } else { - securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { + securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, () -> { final QueryBuilder query; final TermQueryBuilder typeQuery = QueryBuilders .termQuery(ApplicationPrivilegeDescriptor.Fields.TYPE.getPreferredName(), DOC_TYPE_VALUE); @@ -134,33 +136,37 @@ public class NativePrivilegeStore extends AbstractComponent { return collection == null || collection.isEmpty(); } - public void getPrivilege(String application, String name, ActionListener listener) { - securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SECURITY_INDEX_NAME, "doc", toDocId(application, name)).request(), - new ActionListener() { - @Override - public void onResponse(GetResponse response) { - if (response.isExists()) { - listener.onResponse(buildPrivilege(response.getId(), response.getSourceAsBytesRef())); - } else { - listener.onResponse(null); + void getPrivilege(String application, String name, ActionListener listener) { + if (securityIndexManager.isAvailable() == false) { + listener.onResponse(null); + } else { + securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareGet(SECURITY_INDEX_NAME, "doc", toDocId(application, name)).request(), + new ActionListener() { + @Override + public void onResponse(GetResponse response) { + if (response.isExists()) { + listener.onResponse(buildPrivilege(response.getId(), response.getSourceAsBytesRef())); + } else { + listener.onResponse(null); + } } - } - @Override - public void onFailure(Exception e) { - // if the index or the shard is not there / available we just claim the privilege is not there - if (TransportActions.isShardNotAvailableException(e)) { - logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name), e); - listener.onResponse(null); - } else { - logger.error(new ParameterizedMessage("failed to load privilege [{}]", name), e); - listener.onFailure(e); + @Override + public void onFailure(Exception e) { + // if the index or the shard is not there / available we just 
claim the privilege is not there + if (TransportActions.isShardNotAvailableException(e)) { + logger.warn(new ParameterizedMessage("failed to load privilege [{}] index not available", name), e); + listener.onResponse(null); + } else { + logger.error(new ParameterizedMessage("failed to load privilege [{}]", name), e); + listener.onFailure(e); + } } - } - }, - client::get)); + }, + client::get)); + } } public void putPrivileges(Collection privileges, WriteRequest.RefreshPolicy refreshPolicy, @@ -200,23 +206,27 @@ public class NativePrivilegeStore extends AbstractComponent { public void deletePrivileges(String application, Collection names, WriteRequest.RefreshPolicy refreshPolicy, ActionListener>> listener) { - securityIndexManager.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - ActionListener groupListener = new GroupedActionListener<>( - ActionListener.wrap(responses -> { - final Map> deletedNames = responses.stream() - .filter(r -> r.getResult() == DocWriteResponse.Result.DELETED) - .map(r -> r.getId()) - .map(NativePrivilegeStore::nameFromDocId) - .collect(TUPLES_TO_MAP); - clearRolesCache(listener, deletedNames); - }, listener::onFailure), names.size(), Collections.emptyList()); - for (String name : names) { - ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareDelete(SECURITY_INDEX_NAME, "doc", toDocId(application, name)) - .setRefreshPolicy(refreshPolicy) - .request(), groupListener, client::delete); - } - }); + if (securityIndexManager.isAvailable() == false) { + listener.onResponse(Collections.emptyMap()); + } else { + securityIndexManager.checkIndexVersionThenExecute(listener::onFailure, () -> { + ActionListener groupListener = new GroupedActionListener<>( + ActionListener.wrap(responses -> { + final Map> deletedNames = responses.stream() + .filter(r -> r.getResult() == DocWriteResponse.Result.DELETED) + .map(r -> r.getId()) + .map(NativePrivilegeStore::nameFromDocId) + .collect(TUPLES_TO_MAP); + clearRolesCache(listener, deletedNames); + }, listener::onFailure), names.size(), Collections.emptyList()); + for (String name : names) { + ClientHelper.executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, + client.prepareDelete(SECURITY_INDEX_NAME, "doc", toDocId(application, name)) + .setRefreshPolicy(refreshPolicy) + .request(), groupListener, client::delete); + } + }); + } } private void clearRolesCache(ActionListener listener, T value) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index d604d166812..c5be68f4199 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -116,7 +116,7 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { QueryBuilder query; if (names == null || names.isEmpty()) { query = QueryBuilders.termQuery(RoleDescriptor.Fields.TYPE.getPreferredName(), ROLE_TYPE); @@ -144,16 +144,19 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer listener) { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> { - DeleteRequest request = 
client.prepareDelete(SecurityIndexManager.SECURITY_INDEX_NAME, + if (securityIndex.isAvailable() == false) { + listener.onResponse(false); + } else { + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> { + DeleteRequest request = client.prepareDelete(SecurityIndexManager.SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(deleteRoleRequest.name())).request(); - request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, + request.setRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, new ActionListener() { @Override public void onResponse(DeleteResponse deleteResponse) { clearRoleCache(deleteRoleRequest.name(), listener, - deleteResponse.getResult() == DocWriteResponse.Result.DELETED); + deleteResponse.getResult() == DocWriteResponse.Result.DELETED); } @Override @@ -162,7 +165,8 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer listener) { @@ -210,13 +214,13 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer> listener) { Map usageStats = new HashMap<>(3); - if (securityIndex.indexExists() == false) { + if (securityIndex.isAvailable() == false) { usageStats.put("size", 0L); usageStats.put("fls", false); usageStats.put("dls", false); listener.onResponse(usageStats); } else { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareMultiSearch() .add(client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) @@ -298,7 +302,7 @@ public class NativeRolesStore extends AbstractComponent implements BiConsumer listener) { - securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> + securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, client.prepareGet(SECURITY_INDEX_NAME, ROLE_DOC_TYPE, getIdForUser(role)).request(), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java index 2310afe4f77..636885d73a1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java @@ -104,6 +104,7 @@ public final class RestGetTokenAction extends SecurityBaseRestHandler { @Override public void onFailure(Exception e) { + logger.debug("Failed to create token", e); if (e instanceof ActionRequestValidationException) { ActionRequestValidationException validationException = (ActionRequestValidationException) e; final TokenRequestError error; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java index d02b569a744..18d86ab028c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexManager.java @@ -253,6 +253,23 @@ public class SecurityIndexManager extends AbstractComponent implements ClusterSt } } + /** + * Validates the security index is up to date and does not need to migrated. If it is not, the + * consumer is called with an exception. If the security index is up to date, the runnable will + * be executed. NOTE: this method does not check the availability of the index; this check + * is left to the caller so that this condition can be handled appropriately. + */ + public void checkIndexVersionThenExecute(final Consumer consumer, final Runnable andThen) { + final State indexState = this.indexState; // use a local copy so all checks execute against the same state! + if (indexState.indexExists && indexState.isIndexUpToDate == false) { + consumer.accept(new IllegalStateException( + "Security index is not on the current version. Security features relying on the index will not be available until " + + "the upgrade API is run on the security index")); + } else { + andThen.run(); + } + } + /** * Prepares the index by creating it if it doesn't exist or updating the mappings if the mappings are * out of date. After any tasks have been executed, the runnable is then executed. diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 7143182c162..c3e3bddf10e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -152,7 +152,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { public static void initDefaultSettings() { if (SECURITY_DEFAULT_SETTINGS == null) { SECURITY_DEFAULT_SETTINGS = - new SecuritySettingsSource(defaultMaxNumberOfNodes(), randomBoolean(), createTempDir(), Scope.SUITE); + new SecuritySettingsSource(randomBoolean(), createTempDir(), Scope.SUITE); } } @@ -367,7 +367,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { private class CustomSecuritySettingsSource extends SecuritySettingsSource { private CustomSecuritySettingsSource(boolean sslEnabled, Path configDir, Scope scope) { - super(maxNumberOfNodes(), sslEnabled, configDir, scope); + super(sslEnabled, configDir, scope); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 76482c5fa92..6098562ec3a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.reindex.ReindexPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.XPackSettings; @@ -49,13 +48,10 @@ import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; /** * {@link org.elasticsearch.test.NodeConfigurationSource} subclass that allows to set all needed settings for 
x-pack security. - * Unicast discovery is configured through {@link org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration.UnicastZen}, - * also x-pack is installed with all the needed configuration and files. + * X-pack is installed with all the needed configuration and files. * To avoid conflicts, every cluster should have its own instance of this class as some configuration files need to be created. */ -public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.UnicastZen { - - public static final Settings DEFAULT_SETTINGS = Settings.EMPTY; +public class SecuritySettingsSource extends NodeConfigurationSource { public static final String TEST_USER_NAME = "test_user"; public static final String TEST_PASSWORD_HASHED = @@ -93,13 +89,11 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas /** * Creates a new {@link org.elasticsearch.test.NodeConfigurationSource} for the security configuration. * - * @param numOfNodes the number of nodes for proper unicast configuration (can be more than actually available) * @param sslEnabled whether ssl is enabled * @param parentFolder the parent folder that will contain all of the configuration files that need to be created * @param scope the scope of the test that is requiring an instance of SecuritySettingsSource */ - public SecuritySettingsSource(int numOfNodes, boolean sslEnabled, Path parentFolder, Scope scope) { - super(numOfNodes, DEFAULT_SETTINGS); + public SecuritySettingsSource(boolean sslEnabled, Path parentFolder, Scope scope) { this.parentFolder = parentFolder; this.subfolderPrefix = scope.name(); this.sslEnabled = sslEnabled; @@ -129,7 +123,7 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas writeFile(xpackConf, "users", configUsers()); writeFile(xpackConf, "users_roles", configUsersRoles()); - Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)) + Settings.Builder builder = Settings.builder() .put(XPackSettings.SECURITY_ENABLED.getKey(), true) .put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO) .put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? 
SecurityField.NAME4 : SecurityField.NIO) @@ -156,10 +150,9 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas @Override public Settings transportClientSettings() { - Settings superSettings = super.transportClientSettings(); - Settings.Builder builder = Settings.builder().put(superSettings); + Settings.Builder builder = Settings.builder(); addClientSSLSettings(builder, ""); - addDefaultSecurityTransportType(builder, superSettings); + addDefaultSecurityTransportType(builder, Settings.EMPTY); if (randomBoolean()) { builder.put(SecurityField.USER_SETTING.getKey(), @@ -253,16 +246,6 @@ public class SecuritySettingsSource extends ClusterDiscoveryConfiguration.Unicas } } - /** - * Returns the configuration settings given the location of a certificate and its password - * - * @param resourcePathToStore the location of the keystore or truststore - * @param password the password - */ - public static void addSSLSettingsForStore(Settings.Builder builder, String resourcePathToStore, String password) { - addSSLSettingsForStore(builder, "", resourcePathToStore, password, true, true, true); - } - private static void addSSLSettingsForStore(Settings.Builder builder, String prefix, String resourcePathToStore, String password, boolean sslEnabled, boolean hostnameVerificationEnabled, boolean transportClient) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java index cda627806e7..e555bfdb3d3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySingleNodeTestCase.java @@ -65,7 +65,7 @@ public abstract class SecuritySingleNodeTestCase extends ESSingleNodeTestCase { public static void initDefaultSettings() { if (SECURITY_DEFAULT_SETTINGS == null) { SECURITY_DEFAULT_SETTINGS = - new SecuritySettingsSource(1, randomBoolean(), createTempDir(), ESIntegTestCase.Scope.SUITE); + new SecuritySettingsSource(randomBoolean(), createTempDir(), ESIntegTestCase.Scope.SUITE); } } @@ -235,7 +235,7 @@ public abstract class SecuritySingleNodeTestCase extends ESSingleNodeTestCase { private class CustomSecuritySettingsSource extends SecuritySettingsSource { private CustomSecuritySettingsSource(boolean sslEnabled, Path configDir, ESIntegTestCase.Scope scope) { - super(1, sslEnabled, configDir, scope); + super(sslEnabled, configDir, scope); } @Override diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 17a45f23893..65f95ecb2f8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -163,6 +163,11 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { ((Runnable) inv.getArguments()[1]).run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + doAnswer(inv -> { + ((Runnable) inv.getArguments()[1]).run(); + return null; + }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), 
any(Runnable.class)); + when(securityIndex.isAvailable()).thenReturn(true); final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 291c102f396..f2f94176edc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -178,6 +178,11 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { ((Runnable) inv.getArguments()[1]).run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + doAnswer(inv -> { + ((Runnable) inv.getArguments()[1]).run(); + return null; + }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); + when(securityIndex.isAvailable()).thenReturn(true); final ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java index cb1b69708bd..cf19af9c5ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java @@ -178,7 +178,7 @@ public class IndexAuditTrailTests extends SecurityIntegTestCase { logger.info("--> remote indexing enabled. 
security enabled: [{}], SSL enabled: [{}], nodes: [{}]", useSecurity, useSSL, numNodes); SecuritySettingsSource cluster2SettingsSource = - new SecuritySettingsSource(numNodes, useSSL, createTempDir(), Scope.SUITE) { + new SecuritySettingsSource(useSSL, createTempDir(), Scope.SUITE) { @Override public Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java index 96bba962237..ba62e5b52c4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/RemoteIndexAuditTrailStartingTests.java @@ -95,7 +95,7 @@ public class RemoteIndexAuditTrailStartingTests extends SecurityIntegTestCase { // Setup a second test cluster with a single node, security enabled, and SSL final int numNodes = 1; SecuritySettingsSource cluster2SettingsSource = - new SecuritySettingsSource(numNodes, sslEnabled, createTempDir(), Scope.TEST) { + new SecuritySettingsSource(sslEnabled, createTempDir(), Scope.TEST) { @Override public Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index ef5b0386bc2..54908389610 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -87,6 +87,7 @@ import java.util.function.Consumer; import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError; +import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockCheckTokenInvalidationFromId; import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockGetTokenFromId; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -187,6 +188,11 @@ public class AuthenticationServiceTests extends ESTestCase { runnable.run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + doAnswer(invocationOnMock -> { + Runnable runnable = (Runnable) invocationOnMock.getArguments()[1]; + runnable.run(); + return null; + }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); service = new AuthenticationService(settings, realms, auditTrail, @@ -898,7 +904,11 @@ public class AuthenticationServiceTests extends ESTestCase { tokenService.createUserToken(expected, originatingAuth, tokenFuture, Collections.emptyMap(), true); } String token = tokenService.getUserTokenString(tokenFuture.get().v1()); + when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); 
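The test-side changes follow one recipe: wherever prepareIndexIfNeededThenExecute was already stubbed, the new checkIndexVersionThenExecute gets an identical doAnswer stub, and isAvailable()/indexExists() are stubbed to true so the guarded production code runs. Condensed, the Mockito setup added across these test classes looks like the sketch below (illustrative, mirroring the stubs in the hunks above and below).

    SecurityIndexManager securityIndex = mock(SecurityIndexManager.class);
    when(securityIndex.isAvailable()).thenReturn(true);
    when(securityIndex.indexExists()).thenReturn(true);
    doAnswer(invocation -> {
        ((Runnable) invocation.getArguments()[1]).run(); // run the guarded action inline
        return null;
    }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class));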
mockGetTokenFromId(tokenFuture.get().v1(), client); + mockCheckTokenInvalidationFromId(tokenFuture.get().v1(), client); + when(securityIndex.isAvailable()).thenReturn(true); + when(securityIndex.indexExists()).thenReturn(true); try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { threadContext.putHeader("Authorization", "Bearer " + token); service.authenticate("_action", message, (User)null, ActionListener.wrap(result -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 213def0f0fe..7926b44a38c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -137,6 +137,13 @@ public class TokenServiceTests extends ESTestCase { runnable.run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + doAnswer(invocationOnMock -> { + Runnable runnable = (Runnable) invocationOnMock.getArguments()[1]; + runnable.run(); + return null; + }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); + when(securityIndex.indexExists()).thenReturn(true); + when(securityIndex.isAvailable()).thenReturn(true); this.clusterService = ClusterServiceUtils.createClusterService(threadPool); } @@ -161,6 +168,7 @@ public class TokenServiceTests extends ESTestCase { final UserToken token = tokenFuture.get().v1(); assertNotNull(token); mockGetTokenFromId(token); + mockCheckTokenInvalidationFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getUserTokenString(token)); @@ -207,6 +215,7 @@ public class TokenServiceTests extends ESTestCase { final UserToken token = tokenFuture.get().v1(); assertNotNull(token); mockGetTokenFromId(token); + mockCheckTokenInvalidationFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); @@ -266,6 +275,7 @@ public class TokenServiceTests extends ESTestCase { final UserToken token = tokenFuture.get().v1(); assertNotNull(token); mockGetTokenFromId(token); + mockCheckTokenInvalidationFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); @@ -296,6 +306,7 @@ public class TokenServiceTests extends ESTestCase { final UserToken token = tokenFuture.get().v1(); assertNotNull(token); mockGetTokenFromId(token); + mockCheckTokenInvalidationFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); @@ -357,6 +368,7 @@ public class TokenServiceTests extends ESTestCase { final UserToken token = tokenFuture.get().v1(); assertNotNull(token); mockGetTokenFromId(token); + mockCheckTokenInvalidationFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); @@ -454,6 +466,7 @@ public class TokenServiceTests extends ESTestCase { tokenService.createUserToken(authentication, 
authentication, tokenFuture, Collections.emptyMap(), true); final UserToken token = tokenFuture.get().v1(); mockGetTokenFromId(token); + mockCheckTokenInvalidationFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); @@ -577,14 +590,25 @@ public class TokenServiceTests extends ESTestCase { try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); tokenService.getAndValidateToken(requestContext, future); - UserToken serialized = future.get(); - assertEquals(authentication, serialized.getAuthentication()); + assertNull(future.get()); when(securityIndex.isAvailable()).thenReturn(false); when(securityIndex.indexExists()).thenReturn(true); future = new PlainActionFuture<>(); tokenService.getAndValidateToken(requestContext, future); assertNull(future.get()); + + when(securityIndex.indexExists()).thenReturn(false); + future = new PlainActionFuture<>(); + tokenService.getAndValidateToken(requestContext, future); + assertNull(future.get()); + + when(securityIndex.isAvailable()).thenReturn(true); + when(securityIndex.indexExists()).thenReturn(true); + mockCheckTokenInvalidationFromId(token); + future = new PlainActionFuture<>(); + tokenService.getAndValidateToken(requestContext, future); + assertEquals(token.getAuthentication(), future.get().getAuthentication()); } } @@ -625,4 +649,38 @@ public class TokenServiceTests extends ESTestCase { return Void.TYPE; }).when(client).get(any(GetRequest.class), any(ActionListener.class)); } + + private void mockCheckTokenInvalidationFromId(UserToken userToken) { + mockCheckTokenInvalidationFromId(userToken, client); + } + + public static void mockCheckTokenInvalidationFromId(UserToken userToken, Client client) { + doAnswer(invocationOnMock -> { + MultiGetRequest request = (MultiGetRequest) invocationOnMock.getArguments()[0]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + MultiGetResponse response = mock(MultiGetResponse.class); + MultiGetItemResponse[] responses = new MultiGetItemResponse[2]; + when(response.getResponses()).thenReturn(responses); + GetResponse legacyResponse = mock(GetResponse.class); + responses[0] = new MultiGetItemResponse(legacyResponse, null); + when(legacyResponse.isExists()).thenReturn(false); + GetResponse tokenResponse = mock(GetResponse.class); + if (userToken.getId().equals(request.getItems().get(1).id().replace("token_", ""))) { + when(tokenResponse.isExists()).thenReturn(true); + Map sourceMap = new HashMap<>(); + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + userToken.toXContent(builder, ToXContent.EMPTY_PARAMS); + Map accessTokenMap = new HashMap<>(); + accessTokenMap.put("user_token", + XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false)); + accessTokenMap.put("invalidated", false); + sourceMap.put("access_token", accessTokenMap); + } + when(tokenResponse.getSource()).thenReturn(sourceMap); + } + responses[1] = new MultiGetItemResponse(tokenResponse, null); + listener.onResponse(response); + return Void.TYPE; + }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class)); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index f280e85f4ab..425bebc0600 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -242,6 +242,11 @@ public class NativeUsersStoreTests extends ESTestCase { action.run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + doAnswer((i) -> { + Runnable action = (Runnable) i.getArguments()[1]; + action.run(); + return null; + }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); return new NativeUsersStore(Settings.EMPTY, client, securityIndex); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 89058cf4a8b..9b08691f415 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -96,6 +96,12 @@ public class NativePrivilegeStoreTests extends ESTestCase { ((Runnable) invocationOnMock.getArguments()[1]).run(); return null; }).when(securityIndex).prepareIndexIfNeededThenExecute(any(Consumer.class), any(Runnable.class)); + Mockito.doAnswer(invocationOnMock -> { + assertThat(invocationOnMock.getArguments().length, equalTo(2)); + assertThat(invocationOnMock.getArguments()[1], instanceOf(Runnable.class)); + ((Runnable) invocationOnMock.getArguments()[1]).run(); + return null; + }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); store = new NativePrivilegeStore(Settings.EMPTY, client, securityIndex); } diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 1a7d6115e15..4268daf7347 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -57,3 +57,11 @@ artifacts { nodeps nodepsJar archives shadowJar } + +publishing { + publications { + nebula { + artifactId = archivesBaseName + } + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java index 4a688eb3334..f0ca0cb8730 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPainlessExtension.java @@ -17,6 +17,8 @@ import org.elasticsearch.script.SearchScript; import java.util.HashMap; import java.util.List; import java.util.Map; +import org.elasticsearch.script.NumberSortScript; +import org.elasticsearch.script.StringSortScript; import static java.util.Collections.singletonList; @@ -31,7 +33,8 @@ public class SqlPainlessExtension implements PainlessExtension { whitelist.put(FilterScript.CONTEXT, list); whitelist.put(AggregationScript.CONTEXT, list); whitelist.put(SearchScript.CONTEXT, list); - whitelist.put(SearchScript.SCRIPT_SORT_CONTEXT, list); + whitelist.put(NumberSortScript.CONTEXT, list); + whitelist.put(StringSortScript.CONTEXT, list); 
whitelist.put(BucketAggregationSelectorScript.CONTEXT, list); return whitelist; } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index 23df0c58377..cbb6f8956b1 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -196,7 +196,7 @@ setup: "Validation failures": - do: - catch: /Could not find a \[numeric\] field with name \[field_doesnt_exist\] in any of the indices matching the index pattern/ + catch: /Could not find a \[numeric\] or \[date\] field with name \[field_doesnt_exist\] in any of the indices matching the index pattern/ headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser xpack.rollup.put_job: diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java index c7a92dccab8..42c2d9cd07e 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java @@ -123,12 +123,6 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { } } - private MockSecureSettings newSecureSettings(String key, String value) { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(key, value); - return secureSettings; - } - private LdapSession session(SessionFactory factory, String username, SecureString password) { PlainActionFuture future = new PlainActionFuture<>(); factory.session(username, password, future);
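For context on the SqlPainlessExtension hunk above: the single script-sort context is replaced by two dedicated sort contexts, and the extension registers the same whitelist under both. After the change, the whole method reads roughly as in the sketch below; this is illustrative only, and the whitelist resource name is a placeholder, not the plugin's actual file.

    // Illustrative sketch, not the plugin's actual source; "example_whitelist.txt" is hypothetical.
    @Override
    public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
        List<Whitelist> list = singletonList(
            WhitelistLoader.loadFromResourceFiles(SqlPainlessExtension.class, "example_whitelist.txt"));
        Map<ScriptContext<?>, List<Whitelist>> whitelist = new HashMap<>();
        whitelist.put(FilterScript.CONTEXT, list);
        whitelist.put(AggregationScript.CONTEXT, list);
        whitelist.put(SearchScript.CONTEXT, list);
        whitelist.put(NumberSortScript.CONTEXT, list);  // was SearchScript.SCRIPT_SORT_CONTEXT
        whitelist.put(StringSortScript.CONTEXT, list);
        whitelist.put(BucketAggregationSelectorScript.CONTEXT, list);
        return whitelist;
    }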