From e25d7db01a3d45bfa6f1b708f9cde473c0e96187 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 10 Dec 2017 14:59:30 -0500 Subject: [PATCH 01/10] Quote version value in Docker Compose file The example Docker Compose file does not quote the version value; however, the Docker Compose documentation specifies that this value should be quoted to prevent it from being treated as a number. Relates elastic/elasticsearch#27745 Original commit: elastic/x-pack-elasticsearch@42ad68c3ac6ddbdd0d94603e6487067422b3a86a --- docs/en/setup/docker.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/setup/docker.asciidoc b/docs/en/setup/docker.asciidoc index e4575335dc0..ccd536a8eaa 100644 --- a/docs/en/setup/docker.asciidoc +++ b/docs/en/setup/docker.asciidoc @@ -178,7 +178,7 @@ endif::[] ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] -------------------------------------------- -version: 2.2 +version: '2.2' services: elasticsearch: image: {docker-image} From 6bae4681e252e7d7973dafbca5a5ccf6c2f13d55 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Mon, 11 Dec 2017 13:08:58 +0100 Subject: [PATCH 02/10] Tests: Replace http input integration test with REST test (elastic/x-pack-elasticsearch#3215) Remove HttpInputIntegrationTests, which only tested existing functionality; the remaining part was moved into a REST test. Relates elastic/x-pack-elasticsearch#3210 Original commit: elastic/x-pack-elasticsearch@474e5337b6554144798977a6e5022b454e3af988 --- .../input/http/HttpInputIntegrationTests.java | 147 ------------------ .../watcher/input/http/HttpInputTests.java | 9 +- .../watcher/execute_watch/60_http_input.yml | 58 +++++++ .../build.gradle | 2 + 4 files changed, 63 insertions(+), 153 deletions(-) delete mode 100644 plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java create mode 100644 plugin/src/test/resources/rest-api-spec/test/watcher/execute_watch/60_http_input.yml diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java deleted file mode 100644 index c889d9975d9..00000000000 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputIntegrationTests.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License.
- */ -package org.elasticsearch.xpack.watcher.input.http; - -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.transport.Netty4Plugin; -import org.elasticsearch.xpack.watcher.client.WatcherClient; -import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; -import org.elasticsearch.xpack.watcher.common.text.TextTemplate; -import org.elasticsearch.xpack.watcher.condition.CompareCondition; -import org.elasticsearch.xpack.watcher.history.HistoryStore; -import org.elasticsearch.xpack.watcher.support.xcontent.XContentSource; -import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; -import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchResponse; -import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; - -import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Collection; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction; -import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder; -import static org.elasticsearch.xpack.watcher.input.InputBuilders.httpInput; -import static org.elasticsearch.xpack.watcher.test.WatcherTestUtils.xContentSource; -import static org.elasticsearch.xpack.watcher.trigger.TriggerBuilders.schedule; -import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.interval; -import static org.hamcrest.Matchers.equalTo; - -@TestLogging("org.elasticsearch.xpack.watcher:DEBUG,org.elasticsearch.xpack.watcher.WatcherIndexingListener:TRACE") -public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCase { - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .build(); - } - - @Override - protected Collection> nodePlugins() { - ArrayList> plugins = new ArrayList<>(super.nodePlugins()); - plugins.add(Netty4Plugin.class); // for http - return plugins; - } - - public void testHttpInput() throws Exception { - createIndex("index"); - client().prepareIndex("index", "type", "id").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get(); - - InetSocketAddress address = internalCluster().httpAddresses()[0]; - watcherClient().preparePutWatch("_name") - .setSource(watchBuilder() - .trigger(schedule(interval("5s"))) - .input(httpInput(HttpRequestTemplate.builder(address.getHostString(), address.getPort()) - .path("/index/_search") - .body(jsonBuilder().startObject().field("size", 1).endObject().string()) - .putHeader("Content-Type", new TextTemplate("application/json")))) - .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L)) - 
.addAction("_id", loggingAction("anything"))) - .get(); - - timeWarp().trigger("_name"); - refresh(); - assertWatchWithMinimumPerformedActionsCount("_name", 1, false); - } - - public void testHttpInputClusterStats() throws Exception { - InetSocketAddress address = internalCluster().httpAddresses()[0]; - PutWatchResponse putWatchResponse = watcherClient().preparePutWatch("_name") - .setSource(watchBuilder() - .trigger(schedule(interval("1s"))) - .input(httpInput(HttpRequestTemplate.builder(address.getHostString(), address.getPort()).path("/_cluster/stats"))) - .condition(new CompareCondition("ctx.payload.nodes.count.total", CompareCondition.Op.GTE, 1L)) - .addAction("_id", loggingAction("anything"))) - .get(); - - assertTrue(putWatchResponse.isCreated()); - timeWarp().trigger("_name"); - refresh(); - assertWatchWithMinimumPerformedActionsCount("_name", 1, false); - } - - public void testInputFiltering() throws Exception { - WatcherClient watcherClient = watcherClient(); - createIndex("idx"); - // Have a sample document in the index, the watch is going to evaluate - client().prepareIndex("idx", "type").setSource("field", "value").get(); - refresh(); - - InetSocketAddress address = internalCluster().httpAddresses()[0]; - XContentBuilder body = jsonBuilder().prettyPrint().startObject() - .field("query").value(termQuery("field", "value")) - .endObject(); - HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder(address.getHostString(), address.getPort()) - .path(new TextTemplate("/idx/_search")) - .body(body.string()); - - watcherClient.preparePutWatch("_name1") - .setSource(watchBuilder() - .trigger(schedule(interval(10, IntervalSchedule.Interval.Unit.SECONDS))) - .input(httpInput(requestBuilder).extractKeys("hits.total")) - .condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L))) - .get(); - - // in this watcher the condition will fail, because max_score isn't extracted, only total: - watcherClient.preparePutWatch("_name2") - .setSource(watchBuilder() - .trigger(schedule(interval(10, IntervalSchedule.Interval.Unit.SECONDS))) - .input(httpInput(requestBuilder).extractKeys("hits.total")) - .condition(new CompareCondition("ctx.payload.hits.max_score", CompareCondition.Op.GTE, 0L))) - .get(); - - timeWarp().trigger("_name1"); - timeWarp().trigger("_name2"); - refresh(); - - assertWatchWithMinimumPerformedActionsCount("_name1", 1, false); - assertWatchWithNoActionNeeded("_name2", 1); - - // Check that the input result payload has been filtered - refresh(); - SearchResponse searchResponse = client().prepareSearch(HistoryStore.INDEX_PREFIX_WITH_TEMPLATE + "*") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setQuery(matchQuery("watch_id", "_name1")) - .setSize(1) - .get(); - assertHitCount(searchResponse, 1); - XContentSource source = xContentSource(searchResponse.getHits().getAt(0).getSourceRef()); - assertThat(source.getValue("result.input.payload.hits.total"), equalTo((Object) 1)); - } -} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java index 88da927ed66..51742863b05 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/input/http/HttpInputTests.java @@ -220,12 +220,9 @@ public class HttpInputTests extends ESTestCase { .endObject(); XContentParser parser = createParser(builder); parser.nextToken(); - try { 
- httpParser.parseInput("_id", parser); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), is("unsupported http method [_METHOD]")); - } + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> httpParser.parseInput("_id", parser)); + assertThat(e.getMessage(), is("unsupported http method [_METHOD]")); } public void testThatHeadersAreIncludedInPayload() throws Exception { diff --git a/plugin/src/test/resources/rest-api-spec/test/watcher/execute_watch/60_http_input.yml b/plugin/src/test/resources/rest-api-spec/test/watcher/execute_watch/60_http_input.yml new file mode 100644 index 00000000000..8a9ba14cb84 --- /dev/null +++ b/plugin/src/test/resources/rest-api-spec/test/watcher/execute_watch/60_http_input.yml @@ -0,0 +1,58 @@ +--- +setup: + - do: + cluster.health: + wait_for_status: yellow + +--- +"HTTP input supports extracting of keys": + + - do: + cluster.state: {} + - set: { metadata.cluster_uuid : cluster_uuid } + - set: { master_node: master } + + - do: + nodes.info: {} + - set: { nodes.$master.http.publish_address: http_host } + + - do: + xpack.watcher.execute_watch: + body: > + { + "watch" : { + "trigger": { + "schedule": { + "interval": "1s" + } + }, + "input" : { + "http": { + "request": { + "url": "http://${http_host}/_cluster/health", + "auth" : { + "basic" : { + "username" : "x_pack_rest_user", + "password" : "x-pack-test-password" + } + } + }, + "extract": [ "timed_out", "cluster_name" ] + } + }, + "actions": { + "log": { + "logging": { + "text": "executed at {{ctx.execution_time}}" + } + } + } + } + } + + - match: { watch_record.result.input.payload.timed_out: false } + - match: { watch_record.result.input.payload._status_code: 200 } + - is_true: watch_record.result.input.payload._headers + - is_true: watch_record.result.input.payload.cluster_name + # not part of the extract keys, should not occur + - is_false: watch_record.result.input.payload.status diff --git a/qa/smoke-test-watcher-with-security/build.gradle b/qa/smoke-test-watcher-with-security/build.gradle index 36ce6681917..55280b4b791 100644 --- a/qa/smoke-test-watcher-with-security/build.gradle +++ b/qa/smoke-test-watcher-with-security/build.gradle @@ -32,6 +32,8 @@ integTestCluster { extraConfigFile 'x-pack/roles.yml', 'roles.yml' setupCommand 'setupTestAdminUser', 'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'x-pack-test-password', '-r', 'superuser' + setupCommand 'setupXpackUserForTests', + 'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'watcher_manager' setupCommand 'setupWatcherManagerUser', 'bin/x-pack/users', 'useradd', 'watcher_manager', '-p', 'x-pack-test-password', '-r', 'watcher_manager' setupCommand 'setupPowerlessUser', From 5fd68959a0056d6e3b3403fcea2d32dfdff4ef7e Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 11 Dec 2017 13:01:16 +0000 Subject: [PATCH 03/10] [ML] Make datafeeds run-as the user who created/updated them (elastic/x-pack-elasticsearch#3254) This is the ML equivalent of what was done for Watcher in elastic/x-pack-elasticsearch#2808. For security reasons, ML datafeeds should not run as the _xpack user. Instead, they record the security headers from the request to create/update them, and reuse these when performing the search to retrieve data for analysis. 
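As a minimal standalone sketch of the capture step this commit introduces (hypothetical class and header names; the real filter set is MlClientHelper.SECURITY_HEADER_FILTERS, built from Authentication.AUTHENTICATION_KEY and AuthenticationService.RUN_AS_USER_HEADER, as shown in the diff below), the idea boils down to filtering an incoming request's headers down to the security-related ones before storing them on the datafeed config and replaying them around each search:

    // Illustrative sketch only; not the plugin's actual code.
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;
    import java.util.stream.Collectors;

    public class SecurityHeaderFilterSketch {

        // Stand-ins for the real security header names, which come from
        // Authentication.AUTHENTICATION_KEY and AuthenticationService.RUN_AS_USER_HEADER.
        private static final Set<String> SECURITY_HEADER_FILTERS =
                new HashSet<>(Arrays.asList("_authentication", "_run_as_user"));

        // Keep only the security-related headers; the same stream pipeline is
        // applied in the patch before storing headers on a DatafeedConfig and
        // again before copying them into the thread context around a search.
        static Map<String, String> filterSecurityHeaders(Map<String, String> requestHeaders) {
            return requestHeaders.entrySet().stream()
                    .filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey()))
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        }

        public static void main(String[] args) {
            Map<String, String> incoming = new HashMap<>();
            incoming.put("_authentication", "serialized-user-token");
            incoming.put("content-type", "application/json");
            // Prints {_authentication=serialized-user-token}; unrelated headers are dropped.
            System.out.println(filterSecurityHeaders(incoming));
        }
    }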
Relates elastic/x-pack-elasticsearch#1071 Original commit: elastic/x-pack-elasticsearch@29f85de404a8afb26d9f43cfe1fbef103446ea5f --- .../xpack/ml/MlClientHelper.java | 72 +++++++++++ .../elasticsearch/xpack/ml/MlMetadata.java | 24 +++- .../xpack/ml/action/PutDatafeedAction.java | 14 +-- .../xpack/ml/action/StartDatafeedAction.java | 7 +- .../xpack/ml/action/UpdateDatafeedAction.java | 5 +- .../xpack/ml/datafeed/DatafeedConfig.java | 36 +++++- .../xpack/ml/datafeed/DatafeedJobBuilder.java | 5 +- .../xpack/ml/datafeed/DatafeedManager.java | 12 +- .../xpack/ml/datafeed/DatafeedUpdate.java | 15 ++- .../aggregation/AggregationDataExtractor.java | 3 +- .../AggregationDataExtractorContext.java | 6 +- .../AggregationDataExtractorFactory.java | 3 +- .../chunked/ChunkedDataExtractor.java | 3 +- .../chunked/ChunkedDataExtractorContext.java | 5 +- .../chunked/ChunkedDataExtractorFactory.java | 3 +- .../extractor/scroll/ScrollDataExtractor.java | 11 +- .../scroll/ScrollDataExtractorContext.java | 5 +- .../scroll/ScrollDataExtractorFactory.java | 4 +- .../xpack/ml/MlClientHelperTests.java | 117 ++++++++++++++++++ .../xpack/ml/MlMetadataTests.java | 55 ++++---- .../ml/action/CloseJobActionRequestTests.java | 20 +-- .../ml/action/StartDatafeedActionTests.java | 6 +- .../StopDatafeedActionRequestTests.java | 21 ++-- .../ml/datafeed/DatafeedManagerTests.java | 11 +- .../datafeed/DatafeedNodeSelectorTests.java | 36 +++--- .../ml/datafeed/DatafeedUpdateTests.java | 40 +++--- .../AggregationDataExtractorTests.java | 11 +- .../chunked/ChunkedDataExtractorTests.java | 6 +- .../scroll/ScrollDataExtractorTests.java | 35 +++--- .../ml/integration/DatafeedJobsRestIT.java | 112 +++++++++++++---- qa/smoke-test-ml-with-security/roles.yml | 2 +- 31 files changed, 520 insertions(+), 185 deletions(-) create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java create mode 100644 plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java new file mode 100644 index 00000000000..aca1acd4b3b --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlClientHelper.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationService; + +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin; + +/** + * A helper class for actions which decides if we should run via the _xpack user and set ML as origin + * or if we should use the run_as functionality by setting the correct headers + */ +public class MlClientHelper { + + /** + * List of headers that are related to security + */ + public static final Set<String> SECURITY_HEADER_FILTERS = Sets.newHashSet(AuthenticationService.RUN_AS_USER_HEADER, + Authentication.AUTHENTICATION_KEY); + + /** + * Execute a client operation and return the response, try to run a datafeed search with least privileges, when headers exist + * + * @param datafeedConfig The config for a datafeed + * @param client The client used to query + * @param supplier The action to run + * @return An instance of the response class + */ + public static <T extends ActionResponse> T execute(DatafeedConfig datafeedConfig, Client client, Supplier<T> supplier) { + return execute(datafeedConfig.getHeaders(), client, supplier); + } + + /** + * Execute a client operation and return the response, try to run an action with least privileges, when headers exist + * + * @param headers Request headers, ideally including security headers + * @param client The client used to query + * @param supplier The action to run + * @return An instance of the response class + */ + public static <T extends ActionResponse> T execute(Map<String, String> headers, Client client, Supplier<T> supplier) { + // no headers, we will have to use the xpack internal user for our execution by specifying the ml origin + if (headers == null || headers.isEmpty()) { + try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN)) { + return supplier.get(); + } + } else { + try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashContext()) { + Map<String, String> filteredHeaders = headers.entrySet().stream() + .filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + client.threadPool().getThreadContext().copyHeaders(filteredHeaders.entrySet()); + return supplier.get(); + } + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java index 54075448a23..fb7cba02392 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -48,6 +49,7 @@ import java.util.Set;
import java.util.SortedMap; import java.util.TreeMap; import java.util.function.Supplier; +import java.util.stream.Collectors; public class MlMetadata implements MetaData.Custom { @@ -101,7 +103,7 @@ public class MlMetadata implements MetaData.Custom { } public Set expandDatafeedIds(String expression, boolean allowNoDatafeeds) { - return NameResolver.newUnaliased(datafeeds.keySet(), datafeedId -> ExceptionsHelper.missingDatafeedException(datafeedId)) + return NameResolver.newUnaliased(datafeeds.keySet(), ExceptionsHelper::missingDatafeedException) .expand(expression, allowNoDatafeeds); } @@ -285,7 +287,7 @@ public class MlMetadata implements MetaData.Custom { return this; } - public Builder putDatafeed(DatafeedConfig datafeedConfig) { + public Builder putDatafeed(DatafeedConfig datafeedConfig, ThreadContext threadContext) { if (datafeeds.containsKey(datafeedConfig.getId())) { throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists"); } @@ -293,6 +295,17 @@ public class MlMetadata implements MetaData.Custom { checkJobIsAvailableForDatafeed(jobId); Job job = jobs.get(jobId); DatafeedJobValidator.validate(datafeedConfig, job); + + if (threadContext != null) { + // Adjust the request, adding security headers from the current thread context + DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedConfig); + Map headers = threadContext.getHeaders().entrySet().stream() + .filter(e -> MlClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + builder.setHeaders(headers); + datafeedConfig = builder.build(); + } + datafeeds.put(datafeedConfig.getId(), datafeedConfig); return this; } @@ -309,7 +322,7 @@ public class MlMetadata implements MetaData.Custom { } } - public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks) { + public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, ThreadContext threadContext) { String datafeedId = update.getId(); DatafeedConfig oldDatafeedConfig = datafeeds.get(datafeedId); if (oldDatafeedConfig == null) { @@ -317,7 +330,7 @@ public class MlMetadata implements MetaData.Custom { } checkDatafeedIsStopped(() -> Messages.getMessage(Messages.DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE, datafeedId, DatafeedState.STARTED), datafeedId, persistentTasks); - DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig); + DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, threadContext); if (newDatafeedConfig.getJobId().equals(oldDatafeedConfig.getJobId()) == false) { checkJobIsAvailableForDatafeed(newDatafeedConfig.getJobId()); } @@ -393,14 +406,13 @@ public class MlMetadata implements MetaData.Custom { putJob(jobBuilder.build(), true); } - public void checkJobHasNoDatafeed(String jobId) { + void checkJobHasNoDatafeed(String jobId) { Optional datafeed = getDatafeedByJobId(jobId); if (datafeed.isPresent()) { throw ExceptionsHelper.conflictStatusException("Cannot delete job [" + jobId + "] because datafeed [" + datafeed.get().getId() + "] refers to it"); } } - } /** diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java index bd69afd1046..0175e71c50f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java @@ -49,7 
+49,9 @@ import org.elasticsearch.xpack.security.authz.RoleDescriptor; import org.elasticsearch.xpack.security.support.Exceptions; import java.io.IOException; +import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; public class PutDatafeedAction extends Action { @@ -218,8 +220,7 @@ public class PutDatafeedAction extends Action listener) throws Exception { + protected void masterOperation(Request request, ClusterState state, ActionListener listener) { // If security is enabled only create the datafeed if the user requesting creation has // permission to read the indices the datafeed is going to read from if (securityEnabled) { @@ -266,6 +267,7 @@ public class PutDatafeedAction extends Action listener) { + clusterService.submitStateUpdateTask( "put-datafeed-" + request.getDatafeed().getId(), new AckedClusterStateUpdateTask(request, listener) { @@ -275,13 +277,11 @@ public class PutDatafeedAction extends Action { @@ -437,7 +434,7 @@ public class StartDatafeedAction super(settings, NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, Request::new); this.licenseState = licenseState; this.persistentTasksService = persistentTasksService; - this.client = clientWithOrigin(client, ML_ORIGIN); + this.client = client; } @Override @@ -453,7 +450,7 @@ public class StartDatafeedAction } @Override - protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(Request request, ClusterState state, ActionListener listener) { DatafeedParams params = request.params; if (licenseState.isMachineLearningAllowed()) { ActionListener> finalListener = new ActionListener>() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java index 2b64916dea4..e1c2591a7a6 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/UpdateDatafeedAction.java @@ -143,8 +143,7 @@ public class UpdateDatafeedAction extends Action listener) - throws Exception { + protected void masterOperation(Request request, ClusterState state, ActionListener listener) { clusterService.submitStateUpdateTask("update-datafeed-" + request.getUpdate().getId(), new AckedClusterStateUpdateTask(request, listener) { private volatile DatafeedConfig updatedDatafeed; @@ -164,7 +163,7 @@ public class UpdateDatafeedAction extends Action implements public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields"); public static final ParseField SOURCE = new ParseField("_source"); public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config"); + public static final ParseField HEADERS = new ParseField("headers"); // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly public static final ObjectParser METADATA_PARSER = new ObjectParser<>("datafeed_config", true, Builder::new); @@ -117,6 +119,7 @@ public class DatafeedConfig extends AbstractDiffable implements // TODO this is to read former _source field. 
Remove in v7.0.0 parser.declareBoolean((builder, value) -> {}, SOURCE); parser.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSERS.get(parserType), CHUNKING_CONFIG); + parser.declareObject(Builder::setHeaders, (p, c) -> p.mapStrings(), HEADERS); } } @@ -140,10 +143,11 @@ public class DatafeedConfig extends AbstractDiffable implements private final List scriptFields; private final Integer scrollSize; private final ChunkingConfig chunkingConfig; + private final Map headers; private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, List types, QueryBuilder query, AggregatorFactories.Builder aggregations, List scriptFields, - Integer scrollSize, ChunkingConfig chunkingConfig) { + Integer scrollSize, ChunkingConfig chunkingConfig, Map headers) { this.id = id; this.jobId = jobId; this.queryDelay = queryDelay; @@ -155,6 +159,7 @@ public class DatafeedConfig extends AbstractDiffable implements this.scriptFields = scriptFields; this.scrollSize = scrollSize; this.chunkingConfig = chunkingConfig; + this.headers = Objects.requireNonNull(headers); } public DatafeedConfig(StreamInput in) throws IOException { @@ -185,6 +190,11 @@ public class DatafeedConfig extends AbstractDiffable implements in.readBoolean(); } this.chunkingConfig = in.readOptionalWriteable(ChunkingConfig::new); + if (in.getVersion().onOrAfter(Version.V_6_2_0)) { + this.headers = in.readMap(StreamInput::readString, StreamInput::readString); + } else { + this.headers = Collections.emptyMap(); + } } public String getId() { @@ -245,6 +255,10 @@ public class DatafeedConfig extends AbstractDiffable implements return chunkingConfig; } + public Map getHeaders() { + return headers; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); @@ -277,6 +291,9 @@ public class DatafeedConfig extends AbstractDiffable implements out.writeBoolean(false); } out.writeOptionalWriteable(chunkingConfig); + if (out.getVersion().onOrAfter(Version.V_6_2_0)) { + out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); + } } @Override @@ -311,6 +328,10 @@ public class DatafeedConfig extends AbstractDiffable implements if (chunkingConfig != null) { builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig); } + if (headers != null && headers.isEmpty() == false + && params.paramAsBoolean(ToXContentParams.FOR_CLUSTER_STATE, false) == true) { + builder.field(HEADERS.getPreferredName(), headers); + } return builder; } @@ -341,13 +362,14 @@ public class DatafeedConfig extends AbstractDiffable implements && Objects.equals(this.scrollSize, that.scrollSize) && Objects.equals(this.aggregations, that.aggregations) && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig); + && Objects.equals(this.chunkingConfig, that.chunkingConfig) + && Objects.equals(this.headers, that.headers); } @Override public int hashCode() { return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, - chunkingConfig); + chunkingConfig, headers); } @Override @@ -420,6 +442,7 @@ public class DatafeedConfig extends AbstractDiffable implements private List scriptFields; private Integer scrollSize = DEFAULT_SCROLL_SIZE; private ChunkingConfig chunkingConfig; + private Map headers = Collections.emptyMap(); public Builder() { } @@ -442,6 +465,7 @@ public class DatafeedConfig extends AbstractDiffable implements this.scriptFields = config.scriptFields; this.scrollSize = 
config.scrollSize; this.chunkingConfig = config.chunkingConfig; + this.headers = config.headers; } public void setId(String datafeedId) { @@ -452,6 +476,10 @@ public class DatafeedConfig extends AbstractDiffable implements this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } + public void setHeaders(Map headers) { + this.headers = headers; + } + public void setIndices(List indices) { this.indices = ExceptionsHelper.requireNonNull(indices, INDICES.getPreferredName()); } @@ -516,7 +544,7 @@ public class DatafeedConfig extends AbstractDiffable implements setDefaultChunkingConfig(); setDefaultQueryDelay(); return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, - chunkingConfig); + chunkingConfig, headers); } void validateAggregations() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java index 181634411c1..3ada8792747 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilder.java @@ -25,9 +25,6 @@ import java.util.Objects; import java.util.function.Consumer; import java.util.function.Supplier; -import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; -import static org.elasticsearch.xpack.ClientHelper.clientWithOrigin; - public class DatafeedJobBuilder { private final Client client; @@ -36,7 +33,7 @@ public class DatafeedJobBuilder { private final Supplier currentTimeSupplier; public DatafeedJobBuilder(Client client, JobProvider jobProvider, Auditor auditor, Supplier currentTimeSupplier) { - this.client = clientWithOrigin(client, ML_ORIGIN); + this.client = client; this.jobProvider = Objects.requireNonNull(jobProvider); this.auditor = Objects.requireNonNull(auditor); this.currentTimeSupplier = Objects.requireNonNull(currentTimeSupplier); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java index a07c0dec464..928f8ecb0cf 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManager.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.FutureUtils; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ml.MachineLearning; @@ -463,7 +464,16 @@ public class DatafeedManager extends AbstractComponent { } private void runTask(StartDatafeedAction.DatafeedTask task) { - innerRun(runningDatafeedsOnThisNode.get(task.getAllocationId()), task.getDatafeedStartTime(), task.getEndTime()); + // This clearing of the thread context is not strictly necessary. Every action performed by the + // datafeed _should_ be done using the MlClientHelper, which will set the appropriate thread + // context. However, by clearing the thread context here if anyone forgets to use MlClientHelper + // somewhere else in the datafeed code then it should cause a failure in the same way in single + // and multi node clusters. 
If we didn't clear the thread context here then there's a risk that + // a context with sufficient permissions would coincidentally be in force in some single node + // tests, leading to bugs not caught in CI due to many tests running in single node test clusters. + try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { + innerRun(runningDatafeedsOnThisNode.get(task.getAllocationId()), task.getDatafeedStartTime(), task.getEndTime()); + } } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java index 00569b6fab0..a3b856967f2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -20,6 +21,7 @@ import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ml.MlClientHelper; import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -29,7 +31,9 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; /** * A datafeed update contains partial properties to update a {@link DatafeedConfig}. 
@@ -260,7 +264,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { * Applies the update to the given {@link DatafeedConfig} * @return a new {@link DatafeedConfig} that contains the update */ - public DatafeedConfig apply(DatafeedConfig datafeedConfig) { + public DatafeedConfig apply(DatafeedConfig datafeedConfig, ThreadContext threadContext) { if (id.equals(datafeedConfig.getId()) == false) { throw new IllegalArgumentException("Cannot apply update to datafeedConfig with different id"); } @@ -296,6 +300,15 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { if (chunkingConfig != null) { builder.setChunkingConfig(chunkingConfig); } + + if (threadContext != null) { + // Adjust the request, adding security headers from the current thread context + Map headers = threadContext.getHeaders().entrySet().stream() + .filter(e -> MlClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + builder.setHeaders(headers); + } + return builder.build(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java index 6624299fd7f..61cef8e9643 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.xpack.ml.MlClientHelper; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils; @@ -111,7 +112,7 @@ class AggregationDataExtractor implements DataExtractor { } protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - return searchRequestBuilder.get(); + return MlClientHelper.execute(context.headers, client, searchRequestBuilder::get); } private SearchRequestBuilder buildSearchRequest() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorContext.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorContext.java index eefd32ef1fd..4958e39decc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorContext.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorContext.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; @@ -24,9 +25,11 @@ class AggregationDataExtractorContext { final long start; final long end; final boolean includeDocCount; + final Map headers; AggregationDataExtractorContext(String jobId, String timeField, Set fields, List indices, List types, - QueryBuilder query, AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount) { + QueryBuilder query, AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount, + Map headers) 
{ this.jobId = Objects.requireNonNull(jobId); this.timeField = Objects.requireNonNull(timeField); this.fields = Objects.requireNonNull(fields); @@ -37,5 +40,6 @@ class AggregationDataExtractorContext { this.start = start; this.end = end; this.includeDocCount = includeDocCount; + this.headers = headers; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java index db081637624..a353b5a6b67 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactory.java @@ -39,7 +39,8 @@ public class AggregationDataExtractorFactory implements DataExtractorFactory { datafeedConfig.getAggregations(), Intervals.alignToCeil(start, histogramInterval), Intervals.alignToFloor(end, histogramInterval), - job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT)); + job.getAnalysisConfig().getSummaryCountFieldName().equals(DatafeedConfig.DOC_COUNT), + datafeedConfig.getHeaders()); return new AggregationDataExtractor(client, dataExtractorContext); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java index c7a87086aa9..158fdfa8c58 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java @@ -15,6 +15,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.min.Min; +import org.elasticsearch.xpack.ml.MlClientHelper; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils; @@ -133,7 +134,7 @@ public class ChunkedDataExtractor implements DataExtractor { } protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - return searchRequestBuilder.get(); + return MlClientHelper.execute(context.headers, client, searchRequestBuilder::get); } private Optional getNextStream() throws IOException { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java index 4c2cdfd1b12..8efc1e2f7ea 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import java.util.List; +import java.util.Map; import java.util.Objects; class ChunkedDataExtractorContext { @@ -29,10 +30,11 @@ class ChunkedDataExtractorContext { final long end; final TimeValue chunkSpan; final TimeAligner timeAligner; + final 
Map headers; ChunkedDataExtractorContext(String jobId, String timeField, List indices, List types, QueryBuilder query, int scrollSize, long start, long end, @Nullable TimeValue chunkSpan, - TimeAligner timeAligner) { + TimeAligner timeAligner, Map headers) { this.jobId = Objects.requireNonNull(jobId); this.timeField = Objects.requireNonNull(timeField); this.indices = indices.toArray(new String[indices.size()]); @@ -43,5 +45,6 @@ class ChunkedDataExtractorContext { this.end = end; this.chunkSpan = chunkSpan; this.timeAligner = Objects.requireNonNull(timeAligner); + this.headers = headers; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index fe122bce379..fc438e1b017 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -41,7 +41,8 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { timeAligner.alignToCeil(start), timeAligner.alignToFloor(end), datafeedConfig.getChunkingConfig().getTimeSpan(), - timeAligner); + timeAligner, + datafeedConfig.getHeaders()); return new ChunkedDataExtractor(client, dataExtractorFactory, dataExtractorContext); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 2e0fec5da3e..c0a6cb3df54 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -20,6 +20,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.xpack.ml.MlClientHelper; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.ExtractorUtils; import org.elasticsearch.xpack.ml.utils.DomainSplitFunction; @@ -98,7 +99,7 @@ class ScrollDataExtractor implements DataExtractor { } protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - return searchRequestBuilder.get(); + return MlClientHelper.execute(context.headers, client, searchRequestBuilder::get); } private SearchRequestBuilder buildSearchRequest(long start) { @@ -182,7 +183,7 @@ class ScrollDataExtractor implements DataExtractor { private InputStream continueScroll() throws IOException { LOGGER.debug("[{}] Continuing scroll with id [{}]", context.jobId, scrollId); - SearchResponse searchResponse = null; + SearchResponse searchResponse; try { searchResponse = executeSearchScrollRequest(scrollId); } catch (SearchPhaseExecutionException searchExecutionException) { @@ -208,10 +209,10 @@ class ScrollDataExtractor implements DataExtractor { } protected SearchResponse executeSearchScrollRequest(String scrollId) { - return SearchScrollAction.INSTANCE.newRequestBuilder(client) + return MlClientHelper.execute(context.headers, client, () -> SearchScrollAction.INSTANCE.newRequestBuilder(client) .setScroll(SCROLL_TIMEOUT) .setScrollId(scrollId) - .get(); + .get()); } private void 
resetScroll() { @@ -223,7 +224,7 @@ class ScrollDataExtractor implements DataExtractor { if (scrollId != null) { ClearScrollRequest request = new ClearScrollRequest(); request.addScrollId(scrollId); - client.execute(ClearScrollAction.INSTANCE, request).actionGet(); + MlClientHelper.execute(context.headers, client, () -> client.execute(ClearScrollAction.INSTANCE, request).actionGet()); } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java index 9a852e7c3d1..d1666497d24 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import java.util.List; +import java.util.Map; import java.util.Objects; class ScrollDataExtractorContext { @@ -22,10 +23,11 @@ class ScrollDataExtractorContext { final int scrollSize; final long start; final long end; + final Map headers; ScrollDataExtractorContext(String jobId, ExtractedFields extractedFields, List indices, List types, QueryBuilder query, List scriptFields, int scrollSize, - long start, long end) { + long start, long end, Map headers) { this.jobId = Objects.requireNonNull(jobId); this.extractedFields = Objects.requireNonNull(extractedFields); this.indices = indices.toArray(new String[indices.size()]); @@ -35,5 +37,6 @@ class ScrollDataExtractorContext { this.scrollSize = scrollSize; this.start = start; this.end = end; + this.headers = headers; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java index a57609b1069..d059cf380bf 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesAction; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; @@ -46,7 +45,8 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory { datafeedConfig.getScriptFields(), datafeedConfig.getScrollSize(), start, - end); + end, + datafeedConfig.getHeaders()); return new ScrollDataExtractor(client, dataExtractorContext); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java new file mode 100644 index 00000000000..dfbc479bc48 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlClientHelperTests.java @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; +import org.elasticsearch.xpack.security.authc.Authentication; +import org.elasticsearch.xpack.security.authc.AuthenticationService; +import org.junit.Before; + +import java.util.Collections; +import java.util.Map; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.ClientHelper.ACTION_ORIGIN_TRANSIENT_NAME; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MlClientHelperTests extends ESTestCase { + + private Client client = mock(Client.class); + + @Before + public void setupMocks() { + ThreadPool threadPool = mock(ThreadPool.class); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + when(threadPool.getThreadContext()).thenReturn(threadContext); + when(client.threadPool()).thenReturn(threadPool); + + PlainActionFuture searchFuture = PlainActionFuture.newFuture(); + searchFuture.onResponse(new SearchResponse()); + when(client.search(any())).thenReturn(searchFuture); + } + + public void testEmptyHeaders() { + DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed-foo", "foo"); + builder.setIndices(Collections.singletonList("foo-index")); + + assertExecutionWithOrigin(builder.build()); + } + + public void testWithHeaders() { + DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed-foo", "foo"); + builder.setIndices(Collections.singletonList("foo-index")); + Map headers = MapBuilder.newMapBuilder() + .put(Authentication.AUTHENTICATION_KEY, "anything") + .put(AuthenticationService.RUN_AS_USER_HEADER, "anything") + .map(); + builder.setHeaders(headers); + + assertRunAsExecution(builder.build(), h -> { + assertThat(h.keySet(), hasSize(2)); + assertThat(h, hasEntry(Authentication.AUTHENTICATION_KEY, "anything")); + assertThat(h, hasEntry(AuthenticationService.RUN_AS_USER_HEADER, "anything")); + }); + } + + public void testFilteredHeaders() { + DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed-foo", "foo"); + builder.setIndices(Collections.singletonList("foo-index")); + Map unrelatedHeaders = MapBuilder.newMapBuilder() + .put(randomAlphaOfLength(10), "anything") + .map(); + builder.setHeaders(unrelatedHeaders); + + assertRunAsExecution(builder.build(), h -> assertThat(h.keySet(), hasSize(0))); + } + + /** + * This method executes a search and checks if the thread context was enriched with the ml origin + */ + private void assertExecutionWithOrigin(DatafeedConfig datafeedConfig) { + MlClientHelper.execute(datafeedConfig, client, () -> { + Object origin = 
client.threadPool().getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME); + assertThat(origin, is(ML_ORIGIN)); + + // Check that headers are not set + Map headers = client.threadPool().getThreadContext().getHeaders(); + assertThat(headers, not(hasEntry(Authentication.AUTHENTICATION_KEY, "anything"))); + assertThat(headers, not(hasEntry(AuthenticationService.RUN_AS_USER_HEADER, "anything"))); + + return client.search(new SearchRequest()).actionGet(); + }); + } + + /** + * This method executes a search and ensures no stashed origin thread context was created, so that the regular node + * client was used, to emulate a run_as function + */ + public void assertRunAsExecution(DatafeedConfig datafeedConfig, Consumer> consumer) { + MlClientHelper.execute(datafeedConfig, client, () -> { + Object origin = client.threadPool().getThreadContext().getTransient(ACTION_ORIGIN_TRANSIENT_NAME); + assertThat(origin, is(nullValue())); + + consumer.accept(client.threadPool().getThreadContext().getHeaders()); + return client.search(new SearchRequest()).actionGet(); + }); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index 23c3a73b438..3ff569f81e1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; import org.elasticsearch.xpack.ml.job.config.JobTests; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; -import java.io.IOException; import java.util.Collections; import java.util.Date; import java.util.Map; @@ -62,7 +61,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { } job = new Job.Builder(job).setAnalysisConfig(analysisConfig).build(); builder.putJob(job, false); - builder.putDatafeed(datafeedConfig); + builder.putDatafeed(datafeedConfig, null); } else { builder.putJob(job, false); } @@ -163,7 +162,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId(), new PersistentTasksCustomMetaData(0L, Collections.emptyMap()))); @@ -183,7 +182,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); MlMetadata result = builder.build(); assertThat(result.getJobs().get("job_id"), sameInstance(job1)); @@ -200,7 +199,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", "missing-job").build(); MlMetadata.Builder builder = new MlMetadata.Builder(); - expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1)); + expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null)); } public void testPutDatafeed_failBecauseJobIsBeingDeleted() { @@ -209,7 +208,7 @@ public class 
MlMetadataTests extends AbstractSerializingTestCase { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1)); + expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null)); } public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() { @@ -217,9 +216,9 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); - expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1)); + expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, null)); } public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() { @@ -228,10 +227,10 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig2 = createDatafeedConfig("datafeed2", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> builder.putDatafeed(datafeedConfig2)); + () -> builder.putDatafeed(datafeedConfig2, null)); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); } @@ -245,7 +244,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1.build(now), false); - expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1)); + expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, null)); } public void testUpdateDatafeed() { @@ -253,12 +252,12 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setScrollSize(5000); - MlMetadata updatedMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null).build(); + MlMetadata updatedMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null).build(); DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId()); assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId())); @@ -270,7 +269,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { public void testUpdateDatafeed_failBecauseDatafeedDoesNotExist() { DatafeedUpdate.Builder update = new DatafeedUpdate.Builder("job_id"); update.setScrollSize(5000); - expectThrows(ResourceNotFoundException.class, () -> new MlMetadata.Builder().updateDatafeed(update.build(), null).build()); + expectThrows(ResourceNotFoundException.class, () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, null).build()); } public void testUpdateDatafeed_failBecauseDatafeedIsNotStopped() { @@ -278,7 +277,7 @@ public class 
MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); MlMetadata beforeMetadata = builder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -290,7 +289,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { update.setScrollSize(5000); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), tasksInProgress)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), tasksInProgress, null)); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); } @@ -299,14 +298,14 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setJobId(job1.getId() + "_2"); expectThrows(ResourceNotFoundException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null)); } public void testUpdateDatafeed_failBecauseNewJobHasAnotherDatafeedAttached() { @@ -318,15 +317,15 @@ public class MlMetadataTests extends AbstractSerializingTestCase { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); builder.putJob(job2.build(), false); - builder.putDatafeed(datafeedConfig1); - builder.putDatafeed(datafeedConfig2); + builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig2, null); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setJobId(job2.getId()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null)); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.getMessage(), equalTo("A datafeed [datafeed2] already exists for job [job_id_2]")); } @@ -336,7 +335,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1); + builder.putDatafeed(datafeedConfig1, null); MlMetadata result = builder.build(); assertThat(result.getJobs().get("job_id"), sameInstance(job1)); @@ -377,9 +376,9 @@ public class MlMetadataTests extends AbstractSerializingTestCase { public void testExpandDatafeedIds() { MlMetadata.Builder mlMetadataBuilder = newMlMetadataWithJobs("bar-1", "foo-1", "foo-2"); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build()); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build()); - 
mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build()); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), null); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), null); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), null); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -399,7 +398,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { } @Override - protected MlMetadata mutateInstance(MlMetadata instance) throws IOException { + protected MlMetadata mutateInstance(MlMetadata instance) { Map jobs = instance.getJobs(); Map datafeeds = instance.getDatafeeds(); MlMetadata.Builder metadataBuilder = new MlMetadata.Builder(); @@ -408,7 +407,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { metadataBuilder.putJob(entry.getValue(), true); } for (Map.Entry entry : datafeeds.entrySet()) { - metadataBuilder.putDatafeed(entry.getValue()); + metadataBuilder.putDatafeed(entry.getValue(), null); } switch (between(0, 1)) { @@ -429,7 +428,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { } randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build(); metadataBuilder.putJob(randomJob, false); - metadataBuilder.putDatafeed(datafeedConfig); + metadataBuilder.putDatafeed(datafeedConfig, null); break; default: throw new AssertionError("Illegal randomisation branch"); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java index ff21c6e192b..987116e3101 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/CloseJobActionRequestTests.java @@ -80,7 +80,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); mlBuilder.putJob(BaseMlIntegTestCase.createScheduledJob("job_id").build(new Date()), false); mlBuilder.putDatafeed(BaseMlIntegTestCase.createDatafeed("datafeed_id", "job_id", - Collections.singletonList("*"))); + Collections.singletonList("*")), null); final PersistentTasksCustomMetaData.Builder startDataFeedTaskBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", null, JobState.OPENED, startDataFeedTaskBuilder); addTask("datafeed_id", 0L, null, DatafeedState.STARTED, startDataFeedTaskBuilder); @@ -147,7 +147,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa request.setForce(true); CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); assertEquals(Arrays.asList("job_id_1", "job_id_2", "job_id_3"), openJobs); - assertEquals(Arrays.asList("job_id_4"), closingJobs); + assertEquals(Collections.singletonList("job_id_4"), closingJobs); request.setForce(false); expectThrows(ElasticsearchStatusException.class, @@ -171,7 +171,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.Request request = new CloseJobAction.Request("job_id_1"); CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); - assertEquals(Arrays.asList("job_id_1"), openJobs); + assertEquals(Collections.singletonList("job_id_1"), openJobs); assertEquals(Collections.emptyList(), closingJobs); // Job without task is closed @@ -219,7 +219,7 @@ public class 
CloseJobActionRequestTests extends AbstractStreamableXContentTestCa request.setForce(true); CloseJobAction.resolveAndValidateJobId(request, cs1, openJobs, closingJobs); - assertEquals(Arrays.asList("job_id_failed"), openJobs); + assertEquals(Collections.singletonList("job_id_failed"), openJobs); assertEquals(Collections.emptyList(), closingJobs); openJobs.clear(); @@ -252,7 +252,7 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("_all"), cs1, openJobs, closingJobs); assertEquals(Arrays.asList("job_id_open-1", "job_id_open-2"), openJobs); - assertEquals(Arrays.asList("job_id_closing"), closingJobs); + assertEquals(Collections.singletonList("job_id_closing"), closingJobs); openJobs.clear(); closingJobs.clear(); @@ -264,12 +264,12 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("job_id_closing"), cs1, openJobs, closingJobs); assertEquals(Collections.emptyList(), openJobs); - assertEquals(Arrays.asList("job_id_closing"), closingJobs); + assertEquals(Collections.singletonList("job_id_closing"), closingJobs); openJobs.clear(); closingJobs.clear(); CloseJobAction.resolveAndValidateJobId(new CloseJobAction.Request("job_id_open-1"), cs1, openJobs, closingJobs); - assertEquals(Arrays.asList("job_id_open-1"), openJobs); + assertEquals(Collections.singletonList("job_id_open-1"), openJobs); assertEquals(Collections.emptyList(), closingJobs); openJobs.clear(); closingJobs.clear(); @@ -316,8 +316,8 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa } public void testBuildWaitForCloseRequest() { - List openJobIds = Arrays.asList(new String[] {"openjob1", "openjob2"}); - List closingJobIds = Arrays.asList(new String[] {"closingjob1"}); + List openJobIds = Arrays.asList("openjob1", "openjob2"); + List closingJobIds = Collections.singletonList("closingjob1"); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("openjob1", null, JobState.OPENED, tasksBuilder); @@ -343,4 +343,4 @@ public class CloseJobActionRequestTests extends AbstractStreamableXContentTestCa tasks.updateTaskStatus(MlMetadata.datafeedTaskId(datafeedId), state); } -} \ No newline at end of file +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java index 57fd108fe48..c371e0306d2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StartDatafeedActionTests.java @@ -43,7 +43,7 @@ public class StartDatafeedActionTests extends ESTestCase { PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder().build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1) + .putDatafeed(datafeedConfig1, null) .build(); Exception e = expectThrows(ElasticsearchStatusException.class, () -> StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks)); @@ -60,7 +60,7 @@ public class StartDatafeedActionTests extends ESTestCase { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", 
"job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1) + .putDatafeed(datafeedConfig1, null) .build(); StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); @@ -76,7 +76,7 @@ public class StartDatafeedActionTests extends ESTestCase { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1) + .putDatafeed(datafeedConfig1, null) .build(); StartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java index bd8fd0f6eba..18b7714dfe5 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/StopDatafeedActionRequestTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Assignment; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; @@ -66,7 +65,7 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe tasksBuilder.addTask(MlMetadata.datafeedTaskId("foo"), StartDatafeedAction.TASK_NAME, new StartDatafeedAction.DatafeedParams("foo", 0L), new Assignment("node_id", "")); tasksBuilder.updateTaskStatus(MlMetadata.datafeedTaskId("foo"), DatafeedState.STARTED); - PersistentTasksCustomMetaData tasks = tasksBuilder.build(); + tasksBuilder.build(); Job job = createDatafeedJob().build(new Date()); MlMetadata mlMetadata1 = new MlMetadata.Builder().putJob(job, false).build(); @@ -76,7 +75,7 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false) - .putDatafeed(datafeedConfig) + .putDatafeed(datafeedConfig, null) .build(); StopDatafeedAction.validateDatafeedTask("foo", mlMetadata2); } @@ -88,12 +87,12 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder); Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()); DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_2", "job_id_2").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -102,7 +101,7 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe List stoppingDatafeeds = new ArrayList<>(); StopDatafeedAction.resolveDataFeedIds(new 
StopDatafeedAction.Request("datafeed_1"), mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); - assertEquals(Arrays.asList("datafeed_1"), startedDatafeeds); + assertEquals(Collections.singletonList("datafeed_1"), startedDatafeeds); assertEquals(Collections.emptyList(), stoppingDatafeeds); startedDatafeeds.clear(); @@ -120,17 +119,17 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder); Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()); DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_2", "job_id_2").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); addTask("datafeed_3", 0L, "node-1", DatafeedState.STOPPING, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_3").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_3", "job_id_3").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -139,8 +138,8 @@ public class StopDatafeedActionRequestTests extends AbstractStreamableXContentTe List stoppingDatafeeds = new ArrayList<>(); StopDatafeedAction.resolveDataFeedIds(new StopDatafeedAction.Request("_all"), mlMetadata, tasks, startedDatafeeds, stoppingDatafeeds); - assertEquals(Arrays.asList("datafeed_1"), startedDatafeeds); - assertEquals(Arrays.asList("datafeed_3"), stoppingDatafeeds); + assertEquals(Collections.singletonList("datafeed_1"), startedDatafeeds); + assertEquals(Collections.singletonList("datafeed_3"), stoppingDatafeeds); startedDatafeeds.clear(); stoppingDatafeeds.clear(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java index 6a44b5179e6..c17cb8f2dde 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java @@ -16,8 +16,10 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -80,7 +82,7 @@ public class DatafeedManagerTests extends ESTestCase { Job job = createDatafeedJob().build(new Date()); mlMetadata.putJob(job, false); DatafeedConfig datafeed = createDatafeedConfig("datafeed_id", job.getId()).build(); - mlMetadata.putDatafeed(datafeed); + 
mlMetadata.putDatafeed(datafeed, null); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask(job.getId(), "node_id", JobState.OPENED, tasksBuilder); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); @@ -109,6 +111,7 @@ public class DatafeedManagerTests extends ESTestCase { auditor = mock(Auditor.class); threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); ExecutorService executorService = mock(ExecutorService.class); doAnswer(invocation -> { ((Runnable) invocation.getArguments()[0]).run(); @@ -248,7 +251,7 @@ public class DatafeedManagerTests extends ESTestCase { } } - public void testDatafeedTaskWaitsUntilJobIsOpened() throws Exception { + public void testDatafeedTaskWaitsUntilJobIsOpened() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) @@ -288,7 +291,7 @@ public class DatafeedManagerTests extends ESTestCase { verify(threadPool, times(1)).executor(MachineLearning.DATAFEED_THREAD_POOL_NAME); } - public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() throws Exception { + public void testDatafeedTaskStopsBecauseJobFailedWhileOpening() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) @@ -316,7 +319,7 @@ public class DatafeedManagerTests extends ESTestCase { verify(task).stop("job_never_opened", TimeValue.timeValueSeconds(20)); } - public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() throws Exception { + public void testDatafeedGetsStoppedWhileWaitingForJobToOpen() { PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", "node_id", JobState.OPENING, tasksBuilder); ClusterState.Builder cs = ClusterState.builder(clusterService.state()) diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 8ce1fc1c983..ede2aea6a15 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -63,11 +63,11 @@ public class DatafeedNodeSelectorTests extends ESTestCase { .build(); } - public void testSelectNode_GivenJobIsOpened() throws Exception { + public void testSelectNode_GivenJobIsOpened() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -81,11 +81,11 @@ public class DatafeedNodeSelectorTests extends ESTestCase { new DatafeedNodeSelector(clusterState, resolver, "datafeed_id").checkDatafeedTaskCanBeCreated(); } - public void testSelectNode_GivenJobIsOpening() throws Exception { + public void 
testSelectNode_GivenJobIsOpening() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -99,13 +99,13 @@ public class DatafeedNodeSelectorTests extends ESTestCase { new DatafeedNodeSelector(clusterState, resolver, "datafeed_id").checkDatafeedTaskCanBeCreated(); } - public void testNoJobTask() throws Exception { + public void testNoJobTask() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); mlMetadata = mlMetadataBuilder.build(); tasks = PersistentTasksCustomMetaData.builder().build(); @@ -123,11 +123,11 @@ public class DatafeedNodeSelectorTests extends ESTestCase { + "[cannot start datafeed [datafeed_id], because job's [job_id] state is [closed] while state [opened] is required]")); } - public void testSelectNode_GivenJobFailedOrClosed() throws Exception { + public void testSelectNode_GivenJobFailedOrClosed() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -149,13 +149,13 @@ public class DatafeedNodeSelectorTests extends ESTestCase { + "] while state [opened] is required]")); } - public void testShardUnassigned() throws Exception { + public void testShardUnassigned() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -175,13 +175,13 @@ public class DatafeedNodeSelectorTests extends ESTestCase { new DatafeedNodeSelector(clusterState, resolver, "datafeed_id").checkDatafeedTaskCanBeCreated(); } - public void testShardNotAllActive() throws Exception { + public void testShardNotAllActive() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - 
mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -202,11 +202,11 @@ public class DatafeedNodeSelectorTests extends ESTestCase { new DatafeedNodeSelector(clusterState, resolver, "datafeed_id").checkDatafeedTaskCanBeCreated(); } - public void testIndexDoesntExist() throws Exception { + public void testIndexDoesntExist() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -230,7 +230,7 @@ public class DatafeedNodeSelectorTests extends ESTestCase { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); mlMetadata = mlMetadataBuilder.build(); String nodeId = randomBoolean() ? "node_id2" : null; @@ -261,14 +261,14 @@ public class DatafeedNodeSelectorTests extends ESTestCase { new DatafeedNodeSelector(clusterState, resolver, "datafeed_id").checkDatafeedTaskCanBeCreated(); } - public void testSelectNode_GivenJobOpeningAndIndexDoesNotExist() throws Exception { + public void testSelectNode_GivenJobOpeningAndIndexDoesNotExist() { // Here we test that when there are 2 problems, the most critical gets reported first. 
// In this case job is Opening (non-critical) and the index does not exist (critical) MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo"))); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -339,4 +339,4 @@ public class DatafeedNodeSelectorTests extends ESTestCase { return new RoutingTable.Builder().add(rtBuilder).build(); } -} \ No newline at end of file +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdateTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdateTests.java index d549bf6b531..8b153de708a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdateTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdateTests.java @@ -25,9 +25,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig.Mode; -import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -93,7 +91,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase createRandomized(datafeed.getId() + "_2").apply(datafeed)); + expectThrows(IllegalArgumentException.class, () -> createRandomized(datafeed.getId() + "_2").apply(datafeed, null)); } public void testApply_givenEmptyUpdate() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed); + DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, null); assertThat(datafeed, equalTo(updatedDatafeed)); } @@ -127,7 +125,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase next() throws IOException { + public Optional next() { if (streams.isEmpty()) { hasNext = false; return Optional.empty(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java index a6de545db92..7ed00a01f35 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java @@ -15,6 +15,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Client; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; @@ -24,6 +26,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import 
org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -117,7 +120,10 @@ public class ScrollDataExtractorTests extends ESTestCase { @Before public void setUpTests() { + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); capturedSearchRequests = new ArrayList<>(); capturedContinueScrollIds = new ArrayList<>(); jobId = "test-job"; @@ -269,7 +275,7 @@ public class ScrollDataExtractorTests extends ESTestCase { extractor.setNextResponse(createErrorResponse()); assertThat(extractor.hasNext(), is(true)); - expectThrows(IOException.class, () -> extractor.next()); + expectThrows(IOException.class, extractor::next); } public void testExtractionGivenContinueScrollResponseHasError() throws IOException { @@ -288,7 +294,7 @@ public class ScrollDataExtractorTests extends ESTestCase { extractor.setNextResponse(createErrorResponse()); assertThat(extractor.hasNext(), is(true)); - expectThrows(IOException.class, () -> extractor.next()); + expectThrows(IOException.class, extractor::next); } public void testExtractionGivenInitSearchResponseHasShardFailures() throws IOException { @@ -297,7 +303,7 @@ public class ScrollDataExtractorTests extends ESTestCase { extractor.setNextResponse(createResponseWithShardFailures()); assertThat(extractor.hasNext(), is(true)); - expectThrows(IOException.class, () -> extractor.next()); + expectThrows(IOException.class, extractor::next); } public void testExtractionGivenInitSearchResponseEncounteredUnavailableShards() throws IOException { @@ -306,7 +312,7 @@ public class ScrollDataExtractorTests extends ESTestCase { extractor.setNextResponse(createResponseWithUnavailableShards(1)); assertThat(extractor.hasNext(), is(true)); - IOException e = expectThrows(IOException.class, () -> extractor.next()); + IOException e = expectThrows(IOException.class, extractor::next); assertThat(e.getMessage(), equalTo("[" + jobId + "] Search request encountered [1] unavailable shards")); } @@ -333,7 +339,7 @@ public class ScrollDataExtractorTests extends ESTestCase { assertThat(output.isPresent(), is(true)); // A second failure is not tolerated assertThat(extractor.hasNext(), is(true)); - expectThrows(IOException.class, () -> extractor.next()); + expectThrows(IOException.class, extractor::next); } public void testResetScollUsesLastResultTimestamp() throws IOException { @@ -389,7 +395,7 @@ public class ScrollDataExtractorTests extends ESTestCase { assertEquals(new Long(1400L), extractor.getLastTimestamp()); // A second failure is not tolerated assertThat(extractor.hasNext(), is(true)); - expectThrows(SearchPhaseExecutionException.class, () -> extractor.next()); + expectThrows(SearchPhaseExecutionException.class, extractor::next); } public void testSearchPhaseExecutionExceptionOnInitScroll() throws IOException { @@ -398,7 +404,7 @@ public class ScrollDataExtractorTests extends ESTestCase { extractor.setNextResponse(createResponseWithShardFailures()); extractor.setNextResponse(createResponseWithShardFailures()); - expectThrows(IOException.class, () -> extractor.next()); + expectThrows(IOException.class, extractor::next); List capturedClearScrollIds = getCapturedClearScrollIds(); assertThat(capturedClearScrollIds.isEmpty(), is(true)); @@ -412,8 +418,8 @@ public class ScrollDataExtractorTests extends ESTestCase { "script2", new Script(ScriptType.INLINE, 
"painless", "return domainSplit('foo.com', params);", emptyMap()), false); List sFields = Arrays.asList(withoutSplit, withSplit); - ScrollDataExtractorContext context = new ScrollDataExtractorContext(jobId, extractedFields, indices, - types, query, sFields, scrollSize, 1000, 2000); + ScrollDataExtractorContext context = new ScrollDataExtractorContext(jobId, extractedFields, indices, + types, query, sFields, scrollSize, 1000, 2000, Collections.emptyMap()); TestDataExtractor extractor = new TestDataExtractor(context); @@ -460,7 +466,8 @@ public class ScrollDataExtractorTests extends ESTestCase { } private ScrollDataExtractorContext createContext(long start, long end) { - return new ScrollDataExtractorContext(jobId, extractedFields, indices, types, query, scriptFields, scrollSize, start, end); + return new ScrollDataExtractorContext(jobId, extractedFields, indices, types, query, scriptFields, scrollSize, start, end, + Collections.emptyMap()); } private SearchResponse createEmptySearchResponse() { @@ -475,9 +482,9 @@ public class ScrollDataExtractorTests extends ESTestCase { for (int i = 0; i < timestamps.size(); i++) { SearchHit hit = new SearchHit(randomInt()); Map fields = new HashMap<>(); - fields.put(extractedFields.timeField(), new DocumentField("time", Arrays.asList(timestamps.get(i)))); - fields.put("field_1", new DocumentField("field_1", Arrays.asList(field1Values.get(i)))); - fields.put("field_2", new DocumentField("field_2", Arrays.asList(field2Values.get(i)))); + fields.put(extractedFields.timeField(), new DocumentField("time", Collections.singletonList(timestamps.get(i)))); + fields.put("field_1", new DocumentField("field_1", Collections.singletonList(field1Values.get(i)))); + fields.put("field_2", new DocumentField("field_2", Collections.singletonList(field2Values.get(i)))); hit.fields(fields); hits.add(hit); } @@ -519,4 +526,4 @@ public class ScrollDataExtractorTests extends ESTestCase { return reader.lines().collect(Collectors.joining("\n")); } } -} \ No newline at end of file +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 81d64eca366..319ca213193 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.test.rest.XPackRestTestHelper; import org.junit.After; import org.junit.Before; @@ -24,8 +25,10 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.util.Arrays; import java.util.Collections; import java.util.Date; +import java.util.List; import java.util.Locale; import java.util.stream.Collectors; @@ -39,6 +42,8 @@ public class DatafeedJobsRestIT extends ESRestTestCase { basicAuthHeaderValue("x_pack_rest_user", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); private static final String BASIC_AUTH_VALUE_ML_ADMIN = basicAuthHeaderValue("ml_admin", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); + private static final String BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS = + 
basicAuthHeaderValue("ml_admin_plus_data", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); @Override protected Settings restClientSettings() { @@ -50,25 +55,39 @@ public class DatafeedJobsRestIT extends ESRestTestCase { return true; } - private void setupUser() throws IOException { - String password = new String(SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING.getChars()); - - // This user has admin rights on machine learning, but (importantly for the tests) no - // rights on any of the data indexes - String user = "{" - + " \"password\" : \"" + password + "\"," - + " \"roles\" : [ \"machine_learning_admin\" ]" + private void setupDataAccessRole(String index) throws IOException { + String json = "{" + + " \"indices\" : [" + + " { \"names\": [\"" + index + "\"], \"privileges\": [\"read\"] }" + + " ]" + "}"; - client().performRequest("put", "_xpack/security/user/ml_admin", Collections.emptyMap(), - new StringEntity(user, ContentType.APPLICATION_JSON)); + client().performRequest("put", "_xpack/security/role/test_data_access", Collections.emptyMap(), + new StringEntity(json, ContentType.APPLICATION_JSON)); + } + + private void setupUser(String user, List roles) throws IOException { + String password = new String(SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING.getChars()); + + String json = "{" + + " \"password\" : \"" + password + "\"," + + " \"roles\" : [ " + roles.stream().map(unquoted -> "\"" + unquoted + "\"").collect(Collectors.joining(", ")) + " ]" + + "}"; + + client().performRequest("put", "_xpack/security/user/" + user, Collections.emptyMap(), + new StringEntity(json, ContentType.APPLICATION_JSON)); } @Before public void setUpData() throws Exception { - setupUser(); + setupDataAccessRole("network-data"); + // This user has admin rights on machine learning, but (importantly for the tests) no rights + // on any of the data indexes + setupUser("ml_admin", Collections.singletonList("machine_learning_admin")); + // This user has admin rights on machine learning, and read access to the network-data index + setupUser("ml_admin_plus_data", Arrays.asList("machine_learning_admin", "test_data_access")); addAirlineData(); - addNetworkData(); + addNetworkData("network-data"); } private void addAirlineData() throws IOException { @@ -221,7 +240,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase { client().performRequest("post", "_refresh"); } - private void addNetworkData() throws IOException { + private void addNetworkData(String index) throws IOException { // Create index with source = enabled, doc_values = enabled, stored = false + multi-field String mappings = "{" @@ -241,19 +260,19 @@ public class DatafeedJobsRestIT extends ESRestTestCase { + " }" + " }" + "}"; - client().performRequest("put", "network-data", Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON)); + client().performRequest("put", index, Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON)); String docTemplate = "{\"timestamp\":%d,\"host\":\"%s\",\"network_bytes_out\":%d}"; Date date = new Date(1464739200735L); for (int i=0; i<120; i++) { long byteCount = randomNonNegativeLong(); String jsonDoc = String.format(Locale.ROOT, docTemplate, date.getTime(), "hostA", byteCount); - client().performRequest("post", "network-data/doc", Collections.emptyMap(), + client().performRequest("post", index + "/doc", Collections.emptyMap(), new StringEntity(jsonDoc, ContentType.APPLICATION_JSON)); byteCount = randomNonNegativeLong(); jsonDoc = String.format(Locale.ROOT, 
docTemplate, date.getTime(), "hostB", byteCount); - client().performRequest("post", "network-data/doc", Collections.emptyMap(), + client().performRequest("post", index + "/doc", Collections.emptyMap(), new StringEntity(jsonDoc, ContentType.APPLICATION_JSON)); date = new Date(date.getTime() + 10_000); @@ -263,7 +282,6 @@ public class DatafeedJobsRestIT extends ESRestTestCase { client().performRequest("post", "_refresh"); } - public void testLookbackOnlyWithMixedTypes() throws Exception { new LookbackOnlyTestHelper("test-lookback-only-with-mixed-types", "airline-data") .setShouldSucceedProcessing(true).execute(); @@ -494,6 +512,52 @@ public class DatafeedJobsRestIT extends ESRestTestCase { assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":240")); } + public void testLookbackWithoutPermissions() throws Exception { + String jobId = "permission-test-network-job"; + String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\"," + + "\"summary_count_field_name\":\"doc_count\"," + + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]}," + + "\"data_description\" : {\"time_field\":\"timestamp\"}" + + "}"; + client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(), + new StringEntity(job, ContentType.APPLICATION_JSON)); + + String datafeedId = "datafeed-" + jobId; + String aggregations = + "{\"hostname\": {\"terms\" : {\"field\": \"host.keyword\", \"size\":10}," + + "\"aggs\": {\"buckets\": {\"date_histogram\":{\"field\":\"timestamp\",\"interval\":\"5s\"}," + + "\"aggs\": {\"timestamp\":{\"max\":{\"field\":\"timestamp\"}}," + + "\"bytes-delta\":{\"derivative\":{\"buckets_path\":\"avg_bytes_out\"}}," + + "\"avg_bytes_out\":{\"avg\":{\"field\":\"network_bytes_out\"}} }}}}}"; + + // At the time we create the datafeed the user can access the network-data index that we have access to + new DatafeedBuilder(datafeedId, jobId, "network-data", "doc") + .setAggregations(aggregations) + .setChunkingTimespan("300s") + .setAuthHeader(BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS) + .build(); + + // Change the role so that the user can no longer access network-data + setupDataAccessRole("some-other-data"); + + openJob(client(), jobId); + + startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS); + waitUntilJobIsClosed(jobId); + Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"); + String jobStatsResponseAsString = responseEntityToString(jobStatsResponse); + // We expect that no data made it through to the job + assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":0")); + assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":0")); + + // There should be a notification saying that there was a problem extracting data + client().performRequest("post", "_refresh"); + Response notificationsResponse = client().performRequest("get", Auditor.NOTIFICATIONS_INDEX + "/_search?q=job_id:" + jobId); + String notificationsResponseAsString = responseEntityToString(notificationsResponse); + assertThat(notificationsResponseAsString, containsString("\"message\":\"Datafeed is encountering errors extracting data: " + + "action [indices:data/read/search] is unauthorized for user [ml_admin_plus_data]\"")); + } + public void testLookbackWithPipelineBucketAgg() throws Exception { String jobId = "pipeline-bucket-agg-job"; String job = 
"{\"analysis_config\" :{\"bucket_span\":\"1h\"," @@ -665,10 +729,14 @@ public class DatafeedJobsRestIT extends ESRestTestCase { assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0")); } } - private void startDatafeedAndWaitUntilStopped(String datafeedId) throws Exception { + startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_SUPER_USER); + } + + private void startDatafeedAndWaitUntilStopped(String datafeedId, String authHeader) throws Exception { Response startDatafeedRequest = client().performRequest("post", - MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z"); + MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z", + new BasicHeader("Authorization", authHeader)); assertThat(startDatafeedRequest.getStatusLine().getStatusCode(), equalTo(200)); assertThat(responseEntityToString(startDatafeedRequest), equalTo("{\"started\":true}")); assertBusy(() -> { @@ -763,9 +831,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase { } DatafeedBuilder setChunkingTimespan(String timespan) { - chunkingTimespan = timespan; - return this; - } + chunkingTimespan = timespan; + return this; + } Response build() throws IOException { String datafeedConfig = "{" diff --git a/qa/smoke-test-ml-with-security/roles.yml b/qa/smoke-test-ml-with-security/roles.yml index 4c798037c48..e47fe40a120 100644 --- a/qa/smoke-test-ml-with-security/roles.yml +++ b/qa/smoke-test-ml-with-security/roles.yml @@ -7,7 +7,7 @@ minimal: # Give all users involved in these tests access to the indices where the data to # be analyzed is stored, because the ML roles alone do not provide access to # non-ML indices - - names: [ 'airline-data', 'index-foo', 'unavailable-data' ] + - names: [ 'airline-data', 'index-*', 'unavailable-data', 'utopia' ] privileges: - indices:admin/create - indices:admin/refresh From 04c07688a2c2b02ebd19bab4075bdbc9ca62136e Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 11 Dec 2017 15:42:06 +0000 Subject: [PATCH 04/10] [ML] Align special events with buckets (elastic/x-pack-elasticsearch#3258) Original commit: elastic/x-pack-elasticsearch@71f9d0fb133b4632b76ca3a7f11f4afeda3114b6 --- .../xpack/ml/calendars/SpecialEvent.java | 25 +++++++- .../autodetect/AutodetectCommunicator.java | 31 ++++++++-- .../process/autodetect/AutodetectProcess.java | 12 ---- .../BlackHoleAutodetectProcess.java | 5 -- .../autodetect/NativeAutodetectProcess.java | 12 ---- .../autodetect/writer/FieldConfigWriter.java | 4 +- .../xpack/ml/calendars/SpecialEventTests.java | 26 ++++++-- .../AutodetectCommunicatorTests.java | 62 +++++++++++++++++-- 8 files changed, 128 insertions(+), 49 deletions(-) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java index f1291842a76..7c57ea31233 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import 
org.elasticsearch.common.xcontent.ToXContentObject; @@ -19,6 +20,7 @@ import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.Operator; import org.elasticsearch.xpack.ml.job.config.RuleAction; import org.elasticsearch.xpack.ml.job.config.RuleCondition; +import org.elasticsearch.xpack.ml.utils.Intervals; import org.elasticsearch.xpack.ml.utils.time.TimeUtils; import java.io.IOException; @@ -124,10 +126,27 @@ public class SpecialEvent implements ToXContentObject, Writeable { return documentId(id); } - public DetectionRule toDetectionRule() { + /** + * Convert the special event to a detection rule. + * The rule will have 2 time based conditions for the start and + * end of the event. + * + * The rule's start and end times are aligned with the bucket span + * so the start time is rounded down to a bucket interval and the + * end time rounded up. + * + * @param bucketSpan Bucket span to align to + * @return The event as a detection rule. + */ + public DetectionRule toDetectionRule(TimeValue bucketSpan) { List conditions = new ArrayList<>(); - conditions.add(RuleCondition.createTime(Operator.GTE, this.getStartTime().toEpochSecond())); - conditions.add(RuleCondition.createTime(Operator.LT, this.getEndTime().toEpochSecond())); + + long bucketSpanSecs = bucketSpan.getSeconds(); + + long bucketStartTime = Intervals.alignToFloor(getStartTime().toEpochSecond(), bucketSpanSecs); + conditions.add(RuleCondition.createTime(Operator.GTE, bucketStartTime)); + long bucketEndTime = Intervals.alignToCeil(getEndTime().toEpochSecond(), bucketSpanSecs); + conditions.add(RuleCondition.createTime(Operator.LT, bucketEndTime)); DetectionRule.Builder builder = new DetectionRule.Builder(conditions); builder.setRuleAction(RuleAction.SKIP_SAMPLING_AND_FILTER_RESULTS); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java index 57d35633c9a..17f672389da 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java @@ -16,9 +16,9 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.ml.calendars.SpecialEvent; import org.elasticsearch.xpack.ml.job.config.DataDescription; +import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobUpdate; -import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; import org.elasticsearch.xpack.ml.job.process.CountingInputStream; import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; @@ -39,6 +39,8 @@ import java.io.IOException; import java.io.InputStream; import java.time.Duration; import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.concurrent.CountDownLatch; @@ -49,6 +51,7 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.stream.Collectors; public class AutodetectCommunicator implements Closeable { @@ -192,17 +195,37 
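// A worked sketch of the alignment described in toDetectionRule above, assuming
// Intervals.alignToFloor/alignToCeil round down/up to the nearest multiple of the interval:
//   long spanSecs = 300;                                  // bucket span in seconds
//   long start = Intervals.alignToFloor(1010, spanSecs);  // -> 900
//   long end   = Intervals.alignToCeil(1190, spanSecs);   // -> 1200
// The resulting rule covers [900, 1200), so the skipped range always spans whole buckets.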
@@ public class AutodetectCommunicator implements Closeable { autodetectProcess.writeUpdateModelPlotMessage(updateParams.getModelPlotConfig()); } + List eventsAsRules = Collections.emptyList(); + if (specialEvents.isEmpty() == false) { + eventsAsRules = specialEvents.stream() + .map(e -> e.toDetectionRule(job.getAnalysisConfig().getBucketSpan())) + .collect(Collectors.toList()); + } + + // All detection rules for a detector must be updated together as the update + // wipes any previously set rules. + // Build a single list of rules for special events and detection rules. + List> rules = new ArrayList<>(job.getAnalysisConfig().getDetectors().size()); + for (int i = 0; i < job.getAnalysisConfig().getDetectors().size(); i++) { + List detectorRules = new ArrayList<>(eventsAsRules); + rules.add(detectorRules); + } + + // Add detector rules if (updateParams.getDetectorUpdates() != null) { for (JobUpdate.DetectorUpdate update : updateParams.getDetectorUpdates()) { if (update.getRules() != null) { - autodetectProcess.writeUpdateDetectorRulesMessage(update.getDetectorIndex(), update.getRules()); + rules.get(update.getDetectorIndex()).addAll(update.getRules()); } } } - if (updateParams.isUpdateSpecialEvents()) { - autodetectProcess.writeUpdateSpecialEventsMessage(job.getAnalysisConfig().getDetectors().size(), specialEvents); + for (int i = 0; i < job.getAnalysisConfig().getDetectors().size(); i++) { + if (!rules.get(i).isEmpty()) { + autodetectProcess.writeUpdateDetectorRulesMessage(i, rules.get(i)); + } } + return null; }, handler); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcess.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcess.java index e8c5a6d384a..29ff2fabea8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcess.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcess.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.ml.job.process.autodetect; -import org.elasticsearch.xpack.ml.calendars.SpecialEvent; import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; @@ -75,17 +74,6 @@ public interface AutodetectProcess extends Closeable { void writeUpdateDetectorRulesMessage(int detectorIndex, List rules) throws IOException; - /** - * Write the updated special events overwriting any previous events. - * Writing an empty list of special events removes any previously set events. - * - * @param numberOfDetectors The number of detectors in the job. All will be - * updated with the special events - * @param specialEvents List of events to update - * @throws IOException If the write fails - */ - void writeUpdateSpecialEventsMessage(int numberOfDetectors, List specialEvents) throws IOException; - /** * Flush the job pushing any stale data into autodetect. 
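     * Depending on the supplied parameters, a flush may also ask the process to generate interim results.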
* Every flush command generates a unique flush Id which will be output diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/BlackHoleAutodetectProcess.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/BlackHoleAutodetectProcess.java index 666c4bcd56c..9d27c8bb62b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/BlackHoleAutodetectProcess.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/BlackHoleAutodetectProcess.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.ml.job.process.autodetect; -import org.elasticsearch.xpack.ml.calendars.SpecialEvent; import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; @@ -72,10 +71,6 @@ public class BlackHoleAutodetectProcess implements AutodetectProcess { public void writeUpdateDetectorRulesMessage(int detectorIndex, List rules) throws IOException { } - @Override - public void writeUpdateSpecialEventsMessage(int numberOfDetectors, List specialEvents) throws IOException { - } - /** * Accept the request do nothing with it but write the flush acknowledgement to {@link #readAutodetectResults()} * @param params Should interim results be generated diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java index b8c53ae0915..74cb30ab5e8 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcess.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.calendars.SpecialEvent; import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.ModelPlotConfig; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; @@ -42,7 +41,6 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import java.util.stream.Collectors; /** * Autodetect process using native code. 
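 * With this change, special events are no longer written through a dedicated control
 * message; AutodetectCommunicator converts them to detection rules and sends them via
 * the ordinary per-detector rule updates.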
@@ -161,16 +159,6 @@ class NativeAutodetectProcess implements AutodetectProcess { writer.writeUpdateDetectorRulesMessage(detectorIndex, rules); } - @Override - public void writeUpdateSpecialEventsMessage(int numberOfEvents, List specialEvents) throws IOException { - ControlMsgToProcessWriter writer = new ControlMsgToProcessWriter(recordWriter, numberOfAnalysisFields); - - List eventsAsRules = specialEvents.stream().map(SpecialEvent::toDetectionRule).collect(Collectors.toList()); - for (int i = 0; i < numberOfEvents; i++) { - writer.writeUpdateDetectorRulesMessage(i, eventsAsRules); - } - } - @Override public String flushJob(FlushJobParams params) throws IOException { ControlMsgToProcessWriter writer = new ControlMsgToProcessWriter(recordWriter, numberOfAnalysisFields); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/FieldConfigWriter.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/FieldConfigWriter.java index 985af877773..3d107d39ff1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/FieldConfigWriter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/FieldConfigWriter.java @@ -79,8 +79,8 @@ public class FieldConfigWriter { private void writeDetectors(StringBuilder contents) throws IOException { int counter = 0; - - List events = specialEvents.stream().map(SpecialEvent::toDetectionRule).collect(Collectors.toList()); + List events = specialEvents.stream().map(e -> e.toDetectionRule(config.getBucketSpan())) + .collect(Collectors.toList()); for (Detector detector : config.getDetectors()) { int detectorId = counter++; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/SpecialEventTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/SpecialEventTests.java index 7f08a1d7839..148b6200d78 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/SpecialEventTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/SpecialEventTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.calendars; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.ml.job.config.Connective; @@ -25,8 +26,7 @@ import java.util.List; public class SpecialEventTests extends AbstractSerializingTestCase { - @Override - protected SpecialEvent createTestInstance() { + public static SpecialEvent createSpecialEvent() { int size = randomInt(10); List jobIds = new ArrayList<>(size); for (int i = 0; i < size; i++) { @@ -39,6 +39,11 @@ public class SpecialEventTests extends AbstractSerializingTestCase jobIds); } + @Override + protected SpecialEvent createTestInstance() { + return createSpecialEvent(); + } + @Override protected Writeable.Reader instanceReader() { return SpecialEvent::new; @@ -50,8 +55,9 @@ public class SpecialEventTests extends AbstractSerializingTestCase } public void testToDetectionRule() { + long bucketSpanSecs = 300; SpecialEvent event = createTestInstance(); - DetectionRule rule = event.toDetectionRule(); + DetectionRule rule = event.toDetectionRule(TimeValue.timeValueSeconds(bucketSpanSecs)); assertEquals(Connective.AND, rule.getConditionsConnective()); assertEquals(RuleAction.SKIP_SAMPLING_AND_FILTER_RESULTS, rule.getRuleAction()); @@ -61,10 +67,18 @@ public class SpecialEventTests extends 
AbstractSerializingTestCase List conditions = rule.getRuleConditions(); assertEquals(2, conditions.size()); assertEquals(RuleConditionType.TIME, conditions.get(0).getConditionType()); - assertEquals(Operator.GTE, conditions.get(0).getCondition().getOperator()); - assertEquals(event.getStartTime().toEpochSecond(), Long.parseLong(conditions.get(0).getCondition().getValue())); assertEquals(RuleConditionType.TIME, conditions.get(1).getConditionType()); + assertEquals(Operator.GTE, conditions.get(0).getCondition().getOperator()); assertEquals(Operator.LT, conditions.get(1).getCondition().getOperator()); - assertEquals(event.getEndTime().toEpochSecond(), Long.parseLong(conditions.get(1).getCondition().getValue())); + + // Check times are aligned with the bucket + long conditionStartTime = Long.parseLong(conditions.get(0).getCondition().getValue()); + assertEquals(0, conditionStartTime % bucketSpanSecs); + long bucketCount = conditionStartTime / bucketSpanSecs; + assertEquals(bucketSpanSecs * bucketCount, conditionStartTime); + + long conditionEndTime = Long.parseLong(conditions.get(1).getCondition().getValue()); + assertEquals(0, conditionEndTime % bucketSpanSecs); + assertEquals(bucketSpanSecs * (bucketCount + 1), conditionEndTime); } } \ No newline at end of file diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java index de03143be9a..4c2135c6246 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java @@ -10,11 +10,15 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.ml.action.OpenJobAction.JobTask; +import org.elasticsearch.xpack.ml.calendars.SpecialEvent; +import org.elasticsearch.xpack.ml.calendars.SpecialEventTests; import org.elasticsearch.xpack.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.ml.job.config.DataDescription; +import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.Detector; import org.elasticsearch.xpack.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.config.JobUpdate; +import org.elasticsearch.xpack.ml.job.config.RuleCondition; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutoDetectResultProcessor; @@ -23,13 +27,17 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.mockito.InOrder; import org.mockito.Mockito; import java.io.ByteArrayInputStream; import java.io.IOException; import java.time.Duration; +import java.util.Arrays; import java.util.Collections; import java.util.Date; +import java.util.List; import java.util.Optional; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; @@ -48,6 +56,7 @@ import static org.mockito.Mockito.mock; import 
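The rewritten assertions encode the new contract of SpecialEvent.toDetectionRule(TimeValue): the rule's time conditions are snapped to bucket boundaries instead of using the raw event timestamps. A sketch of the alignment arithmetic, assuming (as this fixture does) an event that falls within a single 300-second bucket:

    // Bucket alignment; values are illustrative only.
    long span = 300;                                // bucket span in seconds
    long eventStart = 1_512_990_123L;               // raw event start, epoch seconds
    long alignedStart = (eventStart / span) * span; // round down to the bucket start
    long alignedEnd = alignedStart + span;          // the next bucket boundary
    assert alignedStart % span == 0 && alignedEnd % span == 0;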
static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class AutodetectCommunicatorTests extends ESTestCase { @@ -65,10 +74,52 @@ public class AutodetectCommunicatorTests extends ESTestCase { try (AutodetectCommunicator communicator = createAutodetectCommunicator(process, mock(AutoDetectResultProcessor.class))) { communicator.writeToJob(new ByteArrayInputStream(new byte[0]), randomFrom(XContentType.values()), params, (dataCounts, e) -> {}); - Mockito.verify(process).writeResetBucketsControlMessage(params); + verify(process).writeResetBucketsControlMessage(params); } } + public void testWriteUpdateProcessMessage() throws IOException { + AutodetectProcess process = mockAutodetectProcessWithOutputStream(); + when(process.isReady()).thenReturn(true); + AutodetectCommunicator communicator = createAutodetectCommunicator(process, mock(AutoDetectResultProcessor.class)); + + List conditions = Collections.singletonList( + RuleCondition.createCategorical("foo", "bar")); + + List detectorUpdates = Collections.singletonList( + new JobUpdate.DetectorUpdate(0, "updated description", + Collections.singletonList(new DetectionRule.Builder(conditions).build()))); + + UpdateParams updateParams = new UpdateParams(null, detectorUpdates, true); + List events = Collections.singletonList(SpecialEventTests.createSpecialEvent()); + + communicator.writeUpdateProcessMessage(updateParams, events, ((aVoid, e) -> {})); + + // There are 2 detectors both will be updated with the rule for the special event. + // The first has an additional update rule + ArgumentCaptor captor = ArgumentCaptor.forClass(List.class); + InOrder inOrder = Mockito.inOrder(process); + inOrder.verify(process).writeUpdateDetectorRulesMessage(eq(0), captor.capture()); + assertEquals(2, captor.getValue().size()); + inOrder.verify(process).writeUpdateDetectorRulesMessage(eq(1), captor.capture()); + assertEquals(1, captor.getValue().size()); + verify(process).isProcessAlive(); + verifyNoMoreInteractions(process); + + + // This time there is a single detector update and no special events + detectorUpdates = Collections.singletonList( + new JobUpdate.DetectorUpdate(1, "updated description", + Collections.singletonList(new DetectionRule.Builder(conditions).build()))); + updateParams = new UpdateParams(null, detectorUpdates, true); + communicator.writeUpdateProcessMessage(updateParams, Collections.emptyList(), ((aVoid, e) -> {})); + + inOrder = Mockito.inOrder(process); + inOrder.verify(process).writeUpdateDetectorRulesMessage(eq(1), captor.capture()); + assertEquals(1, captor.getValue().size()); + verify(process, times(2)).isProcessAlive(); + } + public void testFlushJob() throws IOException { AutodetectProcess process = mockAutodetectProcessWithOutputStream(); when(process.isProcessAlive()).thenReturn(true); @@ -175,9 +226,10 @@ public class AutodetectCommunicatorTests extends ESTestCase { DataDescription.Builder dd = new DataDescription.Builder(); dd.setTimeField("time_field"); - Detector.Builder detector = new Detector.Builder("metric", "value"); - detector.setByFieldName("host-metric"); - AnalysisConfig.Builder ac = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); + Detector.Builder metric = new Detector.Builder("metric", "value"); + metric.setByFieldName("host-metric"); + Detector.Builder count = new Detector.Builder("count", null); + AnalysisConfig.Builder ac = 
new AnalysisConfig.Builder(Arrays.asList(metric.build(), count.build())); builder.setDataDescription(dd); builder.setAnalysisConfig(ac); From 249d06b2561c275167c6a1714a0c422ab9a26cbd Mon Sep 17 00:00:00 2001 From: David Roberts Date: Mon, 11 Dec 2017 17:44:03 +0000 Subject: [PATCH 05/10] [ML] Fix permissions for field caps in scroll data extractor Follow up to elastic/x-pack-elasticsearch#3254 Original commit: elastic/x-pack-elasticsearch@d4df9446c0058ad0e3f45212ac49308d67397714 --- .../xpack/ml/action/PreviewDatafeedAction.java | 6 ++++++ .../extractor/scroll/ScrollDataExtractorFactory.java | 7 ++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java index 129dfce05e2..b6c5c3b54fa 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PreviewDatafeedAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlClientHelper; import org.elasticsearch.xpack.ml.MlMetadata; import org.elasticsearch.xpack.ml.datafeed.ChunkingConfig; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; @@ -42,6 +43,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; +import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -214,6 +216,10 @@ public class PreviewDatafeedAction extends Action headers = threadPool.getThreadContext().getHeaders().entrySet().stream() + .filter(e -> MlClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + datafeedWithAutoChunking.setHeaders(headers); // NB: this is using the client from the transport layer, NOT the internal client. // This is important because it means the datafeed search will fail if the user // requesting the preview doesn't have permission to search the relevant indices. diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java index d059cf380bf..025c81ceaa5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorFactory.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Client; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.xpack.ml.MlClientHelper; import org.elasticsearch.xpack.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; @@ -74,6 +75,10 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory { // multi-fields that are not in source. 
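    // Both call sites in this patch apply the same permissions pattern (sketch;
    // it assumes MlClientHelper.SECURITY_HEADER_FILTERS is a set of security
    // header names):
    //
    //   Map<String, String> headers = threadContext.getHeaders().entrySet().stream()
    //           .filter(e -> MlClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey()))
    //           .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    //   datafeed.setHeaders(headers);
    //   MlClientHelper.execute(datafeed, client, () -> { /* runs as the user */ return null; });
    //
    // Copying only the caller's security headers onto the datafeed ensures the
    // preview and field-caps requests are authorized as the requesting user,
    // not as the internal ML user, so missing index permissions fail fast.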
String[] requestFields = job.allFields().stream().map(f -> MlStrings.getParentField(f) + "*").toArray(size -> new String[size]); fieldCapabilitiesRequest.fields(requestFields); - client.execute(FieldCapabilitiesAction.INSTANCE, fieldCapabilitiesRequest, fieldCapabilitiesHandler); + MlClientHelper.execute(datafeed, client, () -> { + client.execute(FieldCapabilitiesAction.INSTANCE, fieldCapabilitiesRequest, fieldCapabilitiesHandler); + // This response gets discarded - the listener handles the real response + return null; + }); } } From 6113b86bdbff8cfc920caed6e12f9fee44e95579 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 12 Dec 2017 09:21:44 +0000 Subject: [PATCH 06/10] [ML] Special events calendar CRUD endpoints (elastic/x-pack-elasticsearch#3267) * Calendar CRUD endpoints * Get calendars requires monitor permission * Address review comments * Add page params to get calendars Original commit: elastic/x-pack-elasticsearch@badd1e6add093dcccb0318206e15cb47533c5292 --- .../xpack/ml/MachineLearning.java | 16 +- .../elasticsearch/xpack/ml/MlMetaIndex.java | 2 + .../xpack/ml/action/DeleteCalendarAction.java | 184 ++++++++++ .../xpack/ml/action/GetCalendarsAction.java | 314 ++++++++++++++++++ .../xpack/ml/action/PutCalendarAction.java | 221 ++++++++++++ .../xpack/ml/action/PutFilterAction.java | 2 +- .../xpack/ml/action/util/PageParams.java | 5 +- .../xpack/ml/calendars/Calendar.java | 139 ++++++++ .../xpack/ml/calendars/SpecialEvent.java | 5 +- .../xpack/ml/job/config/MlFilter.java | 4 +- .../calendar/RestDeleteCalendarAction.java | 39 +++ .../rest/calendar/RestGetCalendarsAction.java | 51 +++ .../rest/calendar/RestPutCalendarAction.java | 51 +++ .../GetCalendarsActionRequestTests.java | 25 ++ .../action/PutCalendarActionRequestTests.java | 44 +++ .../xpack/ml/calendars/CalendarTests.java | 49 +++ .../xpack/ml/integration/JobProviderIT.java | 3 +- .../api/xpack.ml.delete_calendar.json | 17 + .../api/xpack.ml.get_calendars.json | 29 ++ .../api/xpack.ml.put_calendar.json | 20 ++ .../rest-api-spec/test/ml/calendar_crud.yml | 108 ++++++ qa/smoke-test-ml-with-security/build.gradle | 2 + 22 files changed, 1322 insertions(+), 8 deletions(-) create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java create mode 100644 plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetCalendarsActionRequestTests.java create mode 100644 plugin/src/test/java/org/elasticsearch/xpack/ml/action/PutCalendarActionRequestTests.java create mode 100644 plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/CalendarTests.java create mode 100644 plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_calendar.json create mode 100644 plugin/src/test/resources/rest-api-spec/api/xpack.ml.get_calendars.json create mode 100644 plugin/src/test/resources/rest-api-spec/api/xpack.ml.put_calendar.json create mode 100644 
plugin/src/test/resources/rest-api-spec/test/ml/calendar_crud.yml diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index b90fe870bb3..8d3e5e8c135 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -52,6 +52,7 @@ import org.elasticsearch.xpack.XPackFeatureSet; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.ml.action.CloseJobAction; +import org.elasticsearch.xpack.ml.action.DeleteCalendarAction; import org.elasticsearch.xpack.ml.action.DeleteDatafeedAction; import org.elasticsearch.xpack.ml.action.DeleteExpiredDataAction; import org.elasticsearch.xpack.ml.action.DeleteFilterAction; @@ -61,6 +62,7 @@ import org.elasticsearch.xpack.ml.action.FinalizeJobExecutionAction; import org.elasticsearch.xpack.ml.action.FlushJobAction; import org.elasticsearch.xpack.ml.action.ForecastJobAction; import org.elasticsearch.xpack.ml.action.GetBucketsAction; +import org.elasticsearch.xpack.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.ml.action.GetCategoriesAction; import org.elasticsearch.xpack.ml.action.GetDatafeedsAction; import org.elasticsearch.xpack.ml.action.GetDatafeedsStatsAction; @@ -76,6 +78,7 @@ import org.elasticsearch.xpack.ml.action.KillProcessAction; import org.elasticsearch.xpack.ml.action.OpenJobAction; import org.elasticsearch.xpack.ml.action.PostDataAction; import org.elasticsearch.xpack.ml.action.PreviewDatafeedAction; +import org.elasticsearch.xpack.ml.action.PutCalendarAction; import org.elasticsearch.xpack.ml.action.PutDatafeedAction; import org.elasticsearch.xpack.ml.action.PutFilterAction; import org.elasticsearch.xpack.ml.action.PutJobAction; @@ -114,6 +117,9 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactor import org.elasticsearch.xpack.ml.notifications.AuditMessage; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; +import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarAction; +import org.elasticsearch.xpack.ml.rest.calendar.RestGetCalendarsAction; +import org.elasticsearch.xpack.ml.rest.calendar.RestPutCalendarAction; import org.elasticsearch.xpack.ml.rest.datafeeds.RestDeleteDatafeedAction; import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedStatsAction; import org.elasticsearch.xpack.ml.rest.datafeeds.RestGetDatafeedsAction; @@ -458,7 +464,10 @@ public class MachineLearning implements ActionPlugin { new RestStopDatafeedAction(settings, restController), new RestDeleteModelSnapshotAction(settings, restController), new RestDeleteExpiredDataAction(settings, restController), - new RestForecastJobAction(settings, restController) + new RestForecastJobAction(settings, restController), + new RestGetCalendarsAction(settings, restController), + new RestPutCalendarAction(settings, restController), + new RestDeleteCalendarAction(settings, restController) ); } @@ -504,7 +513,10 @@ public class MachineLearning implements ActionPlugin { new ActionHandler<>(DeleteModelSnapshotAction.INSTANCE, DeleteModelSnapshotAction.TransportAction.class), new ActionHandler<>(UpdateProcessAction.INSTANCE, UpdateProcessAction.TransportAction.class), new ActionHandler<>(DeleteExpiredDataAction.INSTANCE, DeleteExpiredDataAction.TransportAction.class), - new 
ActionHandler<>(ForecastJobAction.INSTANCE, ForecastJobAction.TransportAction.class) + new ActionHandler<>(ForecastJobAction.INSTANCE, ForecastJobAction.TransportAction.class), + new ActionHandler<>(GetCalendarsAction.INSTANCE, GetCalendarsAction.TransportAction.class), + new ActionHandler<>(PutCalendarAction.INSTANCE, PutCalendarAction.TransportAction.class), + new ActionHandler<>(DeleteCalendarAction.INSTANCE, DeleteCalendarAction.TransportAction.class) ); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java index 4139f51ab37..95e79cea92e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetaIndex.java @@ -20,6 +20,8 @@ public final class MlMetaIndex { */ public static final String INDEX_NAME = ".ml-meta"; + public static final String INCLUDE_TYPE_KEY = "include_type"; + public static final String TYPE = "doc"; private MlMetaIndex() {} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java new file mode 100644 index 00000000000..1fd089d9685 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteCalendarAction.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class DeleteCalendarAction extends Action { + + public static final DeleteCalendarAction INSTANCE = new DeleteCalendarAction(); + public static final String NAME = 
"cluster:admin/xpack/ml/calendars/delete"; + + private DeleteCalendarAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client, this); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends AcknowledgedRequest { + + + private String calendarId; + + Request() { + + } + + public Request(String calendarId) { + this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); + } + + public String getCalendarId() { + return calendarId; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + calendarId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(calendarId); + } + + @Override + public int hashCode() { + return Objects.hash(calendarId); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Request other = (Request) obj; + return Objects.equals(calendarId, other.calendarId); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client, DeleteCalendarAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse { + + public Response(boolean acknowledged) { + super(acknowledged); + } + + private Response() {} + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + } + + public static class TransportAction extends HandledTransportAction { + + private final Client client; + + @Inject + public TransportAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client) { + super(settings, NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, Request::new); + this.client = client; + } + + @Override + protected void doExecute(DeleteCalendarAction.Request request, ActionListener listener) { + + final String calendarId = request.getCalendarId(); + + DeleteRequest deleteRequest = new DeleteRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, Calendar.documentId(calendarId)); + + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + bulkRequestBuilder.add(deleteRequest); + bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), + new ActionListener() { + @Override + public void onResponse(BulkResponse bulkResponse) { + if (bulkResponse.getItems()[0].status() == RestStatus.NOT_FOUND) { + listener.onFailure(new ResourceNotFoundException("Could not delete calendar with ID [" + calendarId + + "] because it does not exist")); + } else { + listener.onResponse(new Response(true)); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(ExceptionsHelper.serverError("Could not delete calendar with ID [" + calendarId + "]", e)); + } + }); + } + } +} diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java new file mode 100644 index 00000000000..aafb33d2aa1 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/GetCalendarsAction.java @@ -0,0 +1,314 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.action.util.PageParams; +import org.elasticsearch.xpack.ml.action.util.QueryPage; +import org.elasticsearch.xpack.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.job.persistence.JobProvider; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class GetCalendarsAction extends Action { + + public static final GetCalendarsAction INSTANCE = new GetCalendarsAction(); + public static final String NAME = "cluster:monitor/xpack/ml/calendars/get"; + + private GetCalendarsAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends 
ActionRequest { + + private String calendarId; + private PageParams pageParams; + + public Request() { + } + + public void setCalendarId(String calendarId) { + this.calendarId = calendarId; + } + + public String getCalendarId() { + return calendarId; + } + + public PageParams getPageParams() { + return pageParams; + } + + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + + if (calendarId != null && pageParams != null) { + validationException = addValidationError("Params [" + PageParams.FROM.getPreferredName() + + ", " + PageParams.SIZE.getPreferredName() + "] are incompatible with [" + + Calendar.ID.getPreferredName() + "].", + validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + calendarId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(calendarId); + } + + @Override + public int hashCode() { + return Objects.hash(calendarId); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(calendarId, other.calendarId); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + + public static class Response extends ActionResponse implements StatusToXContentObject { + + private QueryPage calendars; + + public Response(QueryPage calendars) { + this.calendars = calendars; + } + + Response() { + } + + public QueryPage getCalendars() { + return calendars; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + calendars = new QueryPage<>(in, Calendar::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + calendars.writeTo(out); + } + + @Override + public RestStatus status() { + return RestStatus.OK; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + calendars.doXContentBody(builder, params); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(calendars); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(calendars, other.calendars); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } + + public static class TransportAction extends HandledTransportAction { + + private final Client client; + + @Inject + public TransportAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client) { + super(settings, NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, Request::new); + this.client = client; + } + + @Override + protected void doExecute(Request request, ActionListener listener) { + final String calendarId = request.getCalendarId(); + if (request.getCalendarId() != null) 
{ + getCalendar(calendarId, listener); + } else { + PageParams pageParams = request.getPageParams(); + if (pageParams == null) { + pageParams = PageParams.defaultParams(); + } + getCalendars(pageParams, listener); + } + } + + private void getCalendar(String calendarId, ActionListener listener) { + GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, Calendar.documentId(calendarId)); + executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { + @Override + public void onResponse(GetResponse getDocResponse) { + + try { + QueryPage calendars; + if (getDocResponse.isExists()) { + BytesReference docSource = getDocResponse.getSourceAsBytesRef(); + + try (XContentParser parser = + XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource)) { + Calendar calendar = Calendar.PARSER.apply(parser, null).build(); + calendars = new QueryPage<>(Collections.singletonList(calendar), 1, Calendar.RESULTS_FIELD); + + Response response = new Response(calendars); + listener.onResponse(response); + } + } else { + this.onFailure(QueryPage.emptyQueryPage(Calendar.RESULTS_FIELD)); + } + + } catch (Exception e) { + this.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private void getCalendars(PageParams pageParams, ActionListener listener) { + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder() + .from(pageParams.getFrom()) + .size(pageParams.getSize()) + .sort(Calendar.ID.getPreferredName()) + .query(QueryBuilders.termQuery(Calendar.TYPE.getPreferredName(), Calendar.CALENDAR_TYPE)); + + SearchRequest searchRequest = new SearchRequest(MlMetaIndex.INDEX_NAME) + .indicesOptions(JobProvider.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS)) + .source(sourceBuilder); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, new ActionListener() { + @Override + public void onResponse(SearchResponse response) { + List docs = new ArrayList<>(); + for (SearchHit hit : response.getHits().getHits()) { + BytesReference docSource = hit.getSourceRef(); + try (XContentParser parser = XContentFactory.xContent(docSource).createParser( + NamedXContentRegistry.EMPTY, docSource)) { + docs.add(Calendar.PARSER.apply(parser, null).build()); + } catch (IOException e) { + this.onFailure(e); + } + } + + Response getResponse = new Response( + new QueryPage<>(docs, docs.size(), Calendar.RESULTS_FIELD)); + listener.onResponse(getResponse); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }, + client::search); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java new file mode 100644 index 00000000000..22a1e6a06f3 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutCalendarAction.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
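GetCalendarsAction supports two mutually exclusive request shapes, enforced by validate(): a direct lookup of the calendar_<id> document when calendar_id is set, or a paged search over every calendar document otherwise. A minimal sketch of both shapes, using the classes introduced in this patch:

    GetCalendarsAction.Request byId = new GetCalendarsAction.Request();
    byId.setCalendarId("my_calendar");             // routed to getCalendar(...)

    GetCalendarsAction.Request paged = new GetCalendarsAction.Request();
    paged.setPageParams(new PageParams(0, 20));    // routed to getCalendars(...)

    // Setting both is rejected: "Params [from, size] are incompatible with [calendar_id]."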
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.calendars.Calendar; +import org.elasticsearch.xpack.ml.job.messages.Messages; +import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Collections; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin; + +public class PutCalendarAction extends Action { + public static final PutCalendarAction INSTANCE = new PutCalendarAction(); + public static final String NAME = "cluster:admin/xpack/ml/calendars/put"; + + private PutCalendarAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + public static Request parseRequest(String calendarId, XContentParser parser) { + Calendar.Builder builder = Calendar.PARSER.apply(parser, null); + if (builder.getId() == null) { + builder.setId(calendarId); + } else if (!Strings.isNullOrEmpty(calendarId) && !calendarId.equals(builder.getId())) { + // If we have both URI and body filter ID, they must be identical + throw new IllegalArgumentException(Messages.getMessage(Messages.INCONSISTENT_ID, Calendar.ID.getPreferredName(), + builder.getId(), calendarId)); + } + return new Request(builder.build()); + } + + private Calendar calendar; + + Request() { + + } + + public Request(Calendar calendar) { + this.calendar = ExceptionsHelper.requireNonNull(calendar, "calendar"); + } + + public Calendar getCalendar() { + return calendar; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException 
validationException = null; + if ("_all".equals(calendar.getId())) { + validationException = + addValidationError("Cannot create a Calendar with the reserved name [_all]", + validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + calendar = new Calendar(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + calendar.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + calendar.toXContent(builder, params); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(calendar); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(calendar, other.calendar); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + + public static class Response extends AcknowledgedResponse implements ToXContentObject { + + private Calendar calendar; + + Response() { + } + + public Response(Calendar calendar) { + super(true); + this.calendar = calendar; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + readAcknowledged(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + writeAcknowledged(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return calendar.toXContent(builder, params); + } + } + + public static class TransportAction extends HandledTransportAction { + + private final Client client; + + @Inject + public TransportAction(Settings settings, ThreadPool threadPool, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, Client client) { + super(settings, NAME, threadPool, transportService, actionFilters, + indexNameExpressionResolver, Request::new); + this.client = client; + } + + @Override + protected void doExecute(Request request, ActionListener listener) { + final Calendar calendar = request.getCalendar(); + IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + indexRequest.source(calendar.toXContent(builder, + new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")))); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise calendar with id [" + calendar.getId() + "]", e); + } + + // Make it an error to overwrite an existing calendar + indexRequest.opType(DocWriteRequest.OpType.CREATE); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, + new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + listener.onResponse(new Response(calendar)); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java 
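PutCalendarAction indexes the calendar into the shared .ml-meta index with opType CREATE, so a repeated PUT for the same calendar_id fails instead of silently overwriting, and the reserved name _all is rejected in validate(). A sketch of building a valid request from this patch's classes:

    PutCalendarAction.Request put = new PutCalendarAction.Request(
            new Calendar("holidays", Arrays.asList("job-1", "job-2")));
    assert put.validate() == null;   // a calendar_id of "_all" would fail here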
index 86e070dcb23..d3df1004dd3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutFilterAction.java @@ -181,7 +181,7 @@ public class PutFilterAction extends Action PARSER = new ConstructingObjectParser<>(PAGE.getPreferredName(), a -> new PageParams(a[0] == null ? DEFAULT_FROM : (int) a[0], a[1] == null ? DEFAULT_SIZE : (int) a[1])); @@ -39,6 +38,10 @@ public class PageParams implements ToXContentObject, Writeable { private final int from; private final int size; + public static PageParams defaultParams() { + return new PageParams(DEFAULT_FROM, DEFAULT_SIZE); + } + public PageParams(StreamInput in) throws IOException { this(in.readVInt(), in.readVInt()); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java new file mode 100644 index 00000000000..ed1d2c5f093 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/Calendar.java @@ -0,0 +1,139 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.calendars; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.MlMetaIndex; +import org.elasticsearch.xpack.ml.job.config.MlFilter; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +public class Calendar implements ToXContentObject, Writeable { + + public static final String CALENDAR_TYPE = "calendar"; + + public static final ParseField TYPE = new ParseField("type"); + public static final ParseField ID = new ParseField("calendar_id"); + public static final ParseField JOB_IDS = new ParseField("job_ids"); + + private static final String DOCUMENT_ID_PREFIX = "calendar_"; + + // For QueryPage + public static final ParseField RESULTS_FIELD = new ParseField("calendars"); + + public static final ObjectParser PARSER = + new ObjectParser<>(ID.getPreferredName(), Calendar.Builder::new); + + static { + PARSER.declareString(Calendar.Builder::setId, ID); + PARSER.declareStringArray(Calendar.Builder::setJobIds, JOB_IDS); + PARSER.declareString((builder, s) -> {}, TYPE); + } + + public static String documentId(String calendarId) { + return DOCUMENT_ID_PREFIX + calendarId; + } + + private final String id; + private final List jobIds; + + public Calendar(String id, List jobIds) { + this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); + this.jobIds = Objects.requireNonNull(jobIds, JOB_IDS.getPreferredName() + " must not be null"); + } + + public Calendar(StreamInput in) throws IOException { + id = in.readString(); + jobIds = Arrays.asList(in.readStringArray()); + } + + public String getId() { + return id; + } + + public String documentId() { + return documentId(id); + } + + public List getJobIds() { + return new ArrayList<>(jobIds); + } + + @Override 
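    // Serialization note (sketch): documents persisted to .ml-meta carry a "type"
    // discriminator ("calendar", "filter", ...), emitted only when the caller
    // passes MlMetaIndex.INCLUDE_TYPE_KEY, e.g.
    //   new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true"))
    // REST responses use the default params and therefore omit the field;
    // MlFilter and SpecialEvent are moved onto this shared key below.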
+ public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeStringArray(jobIds.toArray(new String[jobIds.size()])); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ID.getPreferredName(), id); + builder.field(JOB_IDS.getPreferredName(), jobIds); + if (params.paramAsBoolean(MlMetaIndex.INCLUDE_TYPE_KEY, false)) { + builder.field(TYPE.getPreferredName(), CALENDAR_TYPE); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + + if (!(obj instanceof Calendar)) { + return false; + } + + Calendar other = (Calendar) obj; + return id.equals(other.id) && jobIds.equals(other.jobIds); + } + + @Override + public int hashCode() { + return Objects.hash(id, jobIds); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private String calendarId; + private List jobIds = Collections.emptyList(); + + public String getId() { + return this.calendarId; + } + + public void setId(String calendarId) { + this.calendarId = calendarId; + } + + public Builder setJobIds(List jobIds) { + this.jobIds = jobIds; + return this; + } + + public Calendar build() { + return new Calendar(calendarId, jobIds); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java index 7c57ea31233..339ca80351a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/calendars/SpecialEvent.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.job.config.Connective; import org.elasticsearch.xpack.ml.job.config.DetectionRule; import org.elasticsearch.xpack.ml.job.config.Operator; @@ -171,7 +172,9 @@ public class SpecialEvent implements ToXContentObject, Writeable { builder.dateField(START_TIME.getPreferredName(), START_TIME.getPreferredName() + "_string", startTime.toInstant().toEpochMilli()); builder.dateField(END_TIME.getPreferredName(), END_TIME.getPreferredName() + "_string", endTime.toInstant().toEpochMilli()); builder.field(JOB_IDS.getPreferredName(), jobIds); - builder.field(TYPE.getPreferredName(), SPECIAL_EVENT_TYPE); + if (params.paramAsBoolean(MlMetaIndex.INCLUDE_TYPE_KEY, false)) { + builder.field(TYPE.getPreferredName(), SPECIAL_EVENT_TYPE); + } builder.endObject(); return builder; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java index deac591efc1..874405f9bf5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/MlFilter.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.MlMetaIndex; import java.io.IOException; import java.util.ArrayList; @@ 
-25,7 +26,6 @@ public class MlFilter implements ToXContentObject, Writeable { public static final String DOCUMENT_ID_PREFIX = "filter_"; - public static final String INCLUDE_TYPE_KEY = "include_type"; public static final String FILTER_TYPE = "filter"; public static final ParseField TYPE = new ParseField("type"); @@ -67,7 +67,7 @@ public class MlFilter implements ToXContentObject, Writeable { builder.startObject(); builder.field(ID.getPreferredName(), id); builder.field(ITEMS.getPreferredName(), items); - if (params.paramAsBoolean(INCLUDE_TYPE_KEY, false)) { + if (params.paramAsBoolean(MlMetaIndex.INCLUDE_TYPE_KEY, false)) { builder.field(TYPE.getPreferredName(), FILTER_TYPE); } builder.endObject(); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java new file mode 100644 index 00000000000..2e59fb8c514 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestDeleteCalendarAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.calendar; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.AcknowledgedRestListener; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.action.DeleteCalendarAction; +import org.elasticsearch.xpack.ml.calendars.Calendar; + +import java.io.IOException; + +public class RestDeleteCalendarAction extends BaseRestHandler { + + public RestDeleteCalendarAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.DELETE, + MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this); + } + + @Override + public String getName() { + return "xpack_ml_delete_calendar_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + DeleteCalendarAction.Request request = new DeleteCalendarAction.Request(restRequest.param(Calendar.ID.getPreferredName())); + return channel -> client.execute(DeleteCalendarAction.INSTANCE, request, new AcknowledgedRestListener<>(channel)); + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java new file mode 100644 index 00000000000..449ade7d6a0 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestGetCalendarsAction.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.rest.calendar; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestStatusToXContentListener; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.action.GetCalendarsAction; +import org.elasticsearch.xpack.ml.action.util.PageParams; +import org.elasticsearch.xpack.ml.calendars.Calendar; + +import java.io.IOException; + +public class RestGetCalendarsAction extends BaseRestHandler { + + public RestGetCalendarsAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", + this); + controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "calendars/", this); + } + + @Override + public String getName() { + return "xpack_ml_get_calendars_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + GetCalendarsAction.Request getRequest = new GetCalendarsAction.Request(); + String calendarId = restRequest.param(Calendar.ID.getPreferredName()); + if (!Strings.isNullOrEmpty(calendarId)) { + getRequest.setCalendarId(calendarId); + } + + if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) { + getRequest.setPageParams(new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM), + restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE))); + } + + return channel -> client.execute(GetCalendarsAction.INSTANCE, getRequest, new RestStatusToXContentListener<>(channel)); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java new file mode 100644 index 00000000000..171c1f3d801 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/rest/calendar/RestPutCalendarAction.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.rest.calendar; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.action.PutCalendarAction; +import org.elasticsearch.xpack.ml.calendars.Calendar; + +import java.io.IOException; +import java.util.Collections; + +public class RestPutCalendarAction extends BaseRestHandler { + + public RestPutCalendarAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.PUT, + MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this); + } + + @Override + public String getName() { + return "xpack_ml_put_calendar_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String calendarId = restRequest.param(Calendar.ID.getPreferredName()); + + PutCalendarAction.Request putCalendarRequest; + // A calendar can be created with just a name or with an optional body + if (restRequest.hasContentOrSourceParam()) { + XContentParser parser = restRequest.contentOrSourceParamParser(); + putCalendarRequest = PutCalendarAction.Request.parseRequest(calendarId, parser); + } else { + putCalendarRequest = new PutCalendarAction.Request(new Calendar(calendarId, Collections.emptyList())); + } + + return channel -> client.execute(PutCalendarAction.INSTANCE, putCalendarRequest, new RestToXContentListener<>(channel)); + } +} + diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetCalendarsActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetCalendarsActionRequestTests.java new file mode 100644 index 00000000000..b177f646bc9 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/GetCalendarsActionRequestTests.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.test.AbstractStreamableTestCase; + +public class GetCalendarsActionRequestTests extends AbstractStreamableTestCase { + + + @Override + protected GetCalendarsAction.Request createTestInstance() { + GetCalendarsAction.Request request = new GetCalendarsAction.Request(); + request.setCalendarId(randomAlphaOfLengthBetween(1, 20)); + return request; + } + + @Override + protected GetCalendarsAction.Request createBlankInstance() { + return new GetCalendarsAction.Request(); + } + +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PutCalendarActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PutCalendarActionRequestTests.java new file mode 100644 index 00000000000..db9879a0ff6 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/PutCalendarActionRequestTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
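For clarity, here is a caller-side sketch of the two request shapes RestPutCalendarAction accepts (identifiers are taken from the diff above; the "holidays" id and the surrounding parser variable are illustrative only, not part of the patch):

    // Sketch: with a body, the request is parsed from the supplied XContent.
    PutCalendarAction.Request fromBody = PutCalendarAction.Request.parseRequest("holidays", parser);
    // Without a body, an empty calendar is built from the path parameter alone.
    PutCalendarAction.Request bare =
            new PutCalendarAction.Request(new Calendar("holidays", Collections.emptyList()));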
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.ml.calendars.Calendar; + +import java.util.ArrayList; +import java.util.List; + +public class PutCalendarActionRequestTests extends AbstractStreamableXContentTestCase { + + private final String calendarId = randomAlphaOfLengthBetween(1, 20); + + @Override + protected PutCalendarAction.Request createTestInstance() { + int size = randomInt(10); + List jobIds = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + jobIds.add(randomAlphaOfLengthBetween(1, 20)); + } + Calendar calendar = new Calendar(calendarId, jobIds); + return new PutCalendarAction.Request(calendar); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected PutCalendarAction.Request createBlankInstance() { + return new PutCalendarAction.Request(); + } + + @Override + protected PutCalendarAction.Request doParseInstance(XContentParser parser) { + return PutCalendarAction.Request.parseRequest(calendarId, parser); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/CalendarTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/CalendarTests.java new file mode 100644 index 00000000000..23755a0ded9 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/calendars/CalendarTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.calendars; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class CalendarTests extends AbstractSerializingTestCase { + + @Override + protected Calendar createTestInstance() { + int size = randomInt(10); + List items = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + items.add(randomAlphaOfLengthBetween(1, 20)); + } + return new Calendar(randomAlphaOfLengthBetween(1, 20), items); + } + + @Override + protected Writeable.Reader instanceReader() { + return Calendar::new; + } + + @Override + protected Calendar doParseInstance(XContentParser parser) throws IOException { + return Calendar.PARSER.apply(parser, null).build(); + } + + public void testNullId() { + NullPointerException ex = expectThrows(NullPointerException.class, () -> new Calendar(null, Collections.emptyList())); + assertEquals(Calendar.ID.getPreferredName() + " must not be null", ex.getMessage()); + } + + public void testDocumentId() { + assertThat(Calendar.documentId("foo"), equalTo("calendar_foo")); + } +} \ No newline at end of file diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java index 4acb4afdb32..96f932c86f0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/JobProviderIT.java @@ -278,7 +278,8 @@ public class JobProviderIT extends XPackSingleNodeTestCase { for (SpecialEvent event : events) { IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, event.documentId()); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - indexRequest.source(event.toXContent(builder, ToXContent.EMPTY_PARAMS)); + ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); + indexRequest.source(event.toXContent(builder, params)); bulkRequest.add(indexRequest); } } diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_calendar.json b/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_calendar.json new file mode 100644 index 00000000000..3fcfa8582e5 --- /dev/null +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.ml.delete_calendar.json @@ -0,0 +1,17 @@ +{ + "xpack.ml.delete_calendar": { + "methods": [ "DELETE" ], + "url": { + "path": "/_xpack/ml/calendars/{calendar_id}", + "paths": [ "/_xpack/ml/calendars/{calendar_id}" ], + "parts": { + "calendar_id": { + "type" : "string", + "required" : true, + "description" : "The ID of the calendar to delete" + } + } + }, + "body": null + } +} diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.ml.get_calendars.json b/plugin/src/test/resources/rest-api-spec/api/xpack.ml.get_calendars.json new file mode 100644 index 00000000000..44c06e3501b --- /dev/null +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.ml.get_calendars.json @@ -0,0 +1,29 @@ +{ + "xpack.ml.get_calendars": { + "methods": [ "GET" ], + "url": { + "path": "/_xpack/ml/calendars/{calendar_id}", + "paths": [ + "/_xpack/ml/calendars", + "/_xpack/ml/calendars/{calendar_id}" + ], + "parts": { + "calendar_id": { + "type": "string", + 
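The JobProviderIT change above is the writer-side half of the MlMetaIndex.INCLUDE_TYPE_KEY toggle this series introduces. A minimal sketch of the pattern, assuming the constants shown in the diffs (indexRequest and filter are illustrative stand-ins):

    // Sketch: internal writes to the ML meta index opt in to the "type" field.
    ToXContent.MapParams params =
            new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true"));
    try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
        indexRequest.source(filter.toXContent(builder, params)); // emits "type": "filter"
    }
    // REST responses pass ToXContent.EMPTY_PARAMS instead, so the field is omitted there.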
"description": "The ID of the calendar to fetch" + } + }, + "params": { + "from": { + "type": "int", + "description": "skips a number of calendars" + }, + "size": { + "type": "int", + "description": "specifies a max number of calendars to get" + } + } + }, + "body": null + } +} diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.ml.put_calendar.json b/plugin/src/test/resources/rest-api-spec/api/xpack.ml.put_calendar.json new file mode 100644 index 00000000000..d762ad29315 --- /dev/null +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.ml.put_calendar.json @@ -0,0 +1,20 @@ +{ + "xpack.ml.put_calendar": { + "methods": [ "PUT" ], + "url": { + "path": "/_xpack/ml/calendars/{calendar_id}", + "paths": [ "/_xpack/ml/calendars/{calendar_id}" ], + "parts": { + "calendar_id": { + "type": "string", + "required": true, + "description": "The ID of the calendar to create" + } + } + }, + "body": { + "description" : "The calendar details", + "required" : false + } + } +} diff --git a/plugin/src/test/resources/rest-api-spec/test/ml/calendar_crud.yml b/plugin/src/test/resources/rest-api-spec/test/ml/calendar_crud.yml new file mode 100644 index 00000000000..216cb521b5d --- /dev/null +++ b/plugin/src/test/resources/rest-api-spec/test/ml/calendar_crud.yml @@ -0,0 +1,108 @@ +--- +"Test calendar CRUD": + + - do: + xpack.ml.put_calendar: + calendar_id: "advent" + body: > + { + "job_ids": ["abc", "xyz"] + } + - match: { calendar_id: advent } + - match: { job_ids.0: abc } + - match: { job_ids.1: xyz } + + - do: + xpack.ml.get_calendars: + calendar_id: "advent" + - match: { count: 1 } + - match: + calendars.0: + calendar_id: "advent" + job_ids: ["abc", "xyz"] + - is_false: type + + - do: + xpack.ml.put_calendar: + calendar_id: "Dogs of the Year" + body: > + { + "job_ids": ["abc2"] + } + + - do: + xpack.ml.put_calendar: + calendar_id: "Cats of the Year" + + - do: + xpack.ml.get_calendars: {} + - match: { count: 3 } + + - do: + xpack.ml.delete_calendar: + calendar_id: "Dogs of the Year" + + - do: + xpack.ml.get_calendars: {} + - match: { count: 2 } + + - do: + catch: missing + xpack.ml.get_calendars: + calendar_id: "Dogs of the Year" + +--- +"Test PageParams": + - do: + xpack.ml.put_calendar: + calendar_id: "Calendar1" + - do: + xpack.ml.put_calendar: + calendar_id: "Calendar2" + - do: + xpack.ml.put_calendar: + calendar_id: "Calendar3" + + - do: + xpack.ml.get_calendars: + from: 2 + - match: { count: 1 } + - match: { calendars.0.calendar_id: Calendar3 } + + - do: + xpack.ml.get_calendars: + from: 1 + size: 1 + - match: { count: 1 } + - match: { calendars.0.calendar_id: Calendar2 } + +--- +"Test PageParams with ID is invalid": + - do: + catch: bad_request + xpack.ml.get_calendars: + calendar_id: Tides + size: 10 + +--- +"Test cannot overwrite an exisiting calendar": + + - do: + xpack.ml.put_calendar: + calendar_id: "Mayan" + body: > + { + "job_ids": ["apocalypse"] + } + + - do: + catch: /version_conflict_engine_exception/ + xpack.ml.put_calendar: + calendar_id: "Mayan" + +--- +"Test cannot create calendar with name _all": + - do: + catch: bad_request + xpack.ml.put_calendar: + calendar_id: "_all" diff --git a/qa/smoke-test-ml-with-security/build.gradle b/qa/smoke-test-ml-with-security/build.gradle index d34c99af35a..c39154ea767 100644 --- a/qa/smoke-test-ml-with-security/build.gradle +++ b/qa/smoke-test-ml-with-security/build.gradle @@ -17,6 +17,8 @@ integTestRunner { systemProperty 'tests.rest.blacklist', [ // Remove tests that are expected to throw an exception, because we cannot then // know 
whether to expect an authorization exception or a validation exception + 'ml/calendar_crud/Test cannot create calendar with name _all', + 'ml/calendar_crud/Test PageParams with ID is invalid', 'ml/custom_all_field/Test querying custom all field', 'ml/datafeeds_crud/Test delete datafeed with missing id', 'ml/datafeeds_crud/Test put datafeed referring to missing job_id', From 0d46e9035c9dce24b91d77d1b918d65238c484bf Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 12 Dec 2017 11:15:54 +0000 Subject: [PATCH 07/10] [ML] Get Filters should use executeAsyncWithOrigin (elastic/x-pack-elasticsearch#3295) * Get Filters should use executeAsyncWithOrigin Original commit: elastic/x-pack-elasticsearch@786c7bfd06eeabccbc49d54b483f24f7a18a70fd --- .../xpack/ml/action/DeleteFilterAction.java | 3 +- .../xpack/ml/action/GetFiltersAction.java | 48 ++++++++----------- .../xpack/ml/action/PutCalendarAction.java | 3 +- .../xpack/ml/action/PutFilterAction.java | 4 +- .../ml/rest/filter/RestGetFiltersAction.java | 7 ++- .../rest-api-spec/test/ml/filter_crud.yml | 16 +++++++ .../rest-api-spec/test/ml/get_filters.yml | 20 -------- 7 files changed, 44 insertions(+), 57 deletions(-) delete mode 100644 plugin/src/test/resources/rest-api-spec/test/ml/get_filters.yml diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java index dadf47bee68..ba1598b851d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/DeleteFilterAction.java @@ -202,8 +202,7 @@ public class DeleteFilterAction extends Action { @@ -81,10 +83,6 @@ public class GetFiltersAction extends Action { - private final TransportGetAction transportGetAction; - private final TransportSearchAction transportSearchAction; + private final Client client; @Inject public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - TransportGetAction transportGetAction, TransportSearchAction transportSearchAction) { + Client client) { super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); - this.transportGetAction = transportGetAction; - this.transportSearchAction = transportSearchAction; + this.client = client; } @Override @@ -237,16 +228,18 @@ public class GetFiltersAction extends Action listener) { GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); - transportGetAction.execute(getRequest, new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { @Override public void onResponse(GetResponse getDocResponse) { @@ -287,7 +280,7 @@ public class GetFiltersAction extends Action() { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, searchRequest, new ActionListener() { @Override public void onResponse(SearchResponse response) { List docs = new ArrayList<>(); @@ -310,7 +303,8 @@ public class GetFiltersAction extends Action client.execute(GetFiltersAction.INSTANCE, getListRequest, new RestStatusToXContentListener<>(channel)); diff --git a/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml b/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml index a8667487d0a..c83c7ea2785 100644 --- 
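The heart of this change, sketched in isolation: instead of injecting TransportGetAction and TransportSearchAction, the transport action now goes through the injected Client so the request runs under the ML origin. The request construction below mirrors the diff; the listener body is illustrative only:

    // Sketch: run the get under the ML origin rather than via an injected transport action.
    GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId));
    executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(
            getDocResponse -> { /* parse the MlFilter from the document source */ },
            listener::onFailure));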
a/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml +++ b/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml @@ -202,3 +202,19 @@ setup: catch: missing xpack.ml.get_filters: filter_id: "filter-foo" + +--- +"Test get all filter given no filter exists": + + - do: + xpack.ml.delete_filter: + filter_id: "filter-foo" + + - do: + xpack.ml.delete_filter: + filter_id: "filter-foo2" + + - do: + xpack.ml.get_filters: {} + - match: { count: 0 } + - match: { filters: [] } diff --git a/plugin/src/test/resources/rest-api-spec/test/ml/get_filters.yml b/plugin/src/test/resources/rest-api-spec/test/ml/get_filters.yml deleted file mode 100644 index e969722b4c8..00000000000 --- a/plugin/src/test/resources/rest-api-spec/test/ml/get_filters.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -"Test get all filter given no filter exists": - - - do: - xpack.ml.put_filter: - filter_id: filter-foo - body: > - { - "filter_id": "filter-foo", - "items": ["abc", "xyz"] - } - - - do: - xpack.ml.delete_filter: - filter_id: "filter-foo" - - - do: - xpack.ml.get_filters: {} - - match: { count: 0 } - - match: { filters: [] } From 711254fd2414c9f48b7b8f0067b4df02fe0a3b81 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 12 Dec 2017 12:29:40 +0100 Subject: [PATCH 08/10] [Monitoring] Use the same Cluster State for all Collectors (elastic/x-pack-elasticsearch#3216) This commit changes the Collectors so that they all use the same instance of ClusterState. relates elastic/x-pack-elasticsearch#3156 Original commit: elastic/x-pack-elasticsearch@4f537b026c5a4dc7ce7912b5480bd5c8fa1c8ce5 --- .../xpack/monitoring/Monitoring.java | 2 +- .../xpack/monitoring/MonitoringService.java | 12 +++-- .../xpack/monitoring/collector/Collector.java | 34 ++++++++----- .../cluster/ClusterStatsCollector.java | 12 +++-- .../indices/IndexRecoveryCollector.java | 12 +++-- .../indices/IndexStatsCollector.java | 11 +++-- .../collector/ml/JobStatsCollector.java | 16 +++--- .../collector/node/NodeStatsCollector.java | 14 ++++-- .../collector/shards/ShardsCollector.java | 14 +++--- .../monitoring/MonitoringServiceTests.java | 8 +-- .../collector/BaseCollectorTestCase.java | 2 +- .../cluster/ClusterStatsCollectorTests.java | 16 ++---- .../indices/IndexRecoveryCollectorTests.java | 28 +++++------ .../indices/IndexStatsCollectorTests.java | 27 +++++----- .../collector/ml/JobStatsCollectorTests.java | 49 +++++++++++-------- .../node/NodeStatsCollectorTests.java | 20 +++++--- .../shards/ShardsCollectorTests.java | 26 +++++----- 17 files changed, 170 insertions(+), 133 deletions(-) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 3e3bc5d0af7..85413348ccd 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -161,7 +161,7 @@ public class Monitoring implements ActionPlugin { collectors.add(new IndexRecoveryCollector(settings, clusterService, licenseState, client)); collectors.add(new JobStatsCollector(settings, clusterService, licenseState, client)); - final MonitoringService monitoringService = new MonitoringService(settings, clusterSettings, threadPool, collectors, exporters); + final MonitoringService monitoringService = new MonitoringService(settings, clusterService, threadPool, collectors, exporters); return Arrays.asList(monitoringService, exporters, cleanerService); } diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java index c7588a09541..30e4df5d953 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.monitoring; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; @@ -61,6 +63,7 @@ public class MonitoringService extends AbstractLifecycleComponent { /** Task in charge of collecting and exporting monitoring data **/ private final MonitoringExecution monitor = new MonitoringExecution(); + private final ClusterService clusterService; private final ThreadPool threadPool; private final Set collectors; private final Exporters exporters; @@ -68,14 +71,15 @@ public class MonitoringService extends AbstractLifecycleComponent { private volatile TimeValue interval; private volatile ThreadPool.Cancellable scheduler; - MonitoringService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, + MonitoringService(Settings settings, ClusterService clusterService, ThreadPool threadPool, Set collectors, Exporters exporters) { super(settings); + this.clusterService = Objects.requireNonNull(clusterService); this.threadPool = Objects.requireNonNull(threadPool); this.collectors = Objects.requireNonNull(collectors); this.exporters = Objects.requireNonNull(exporters); this.interval = INTERVAL.get(settings); - clusterSettings.addSettingsUpdateConsumer(INTERVAL, this::setInterval); + clusterService.getClusterSettings().addSettingsUpdateConsumer(INTERVAL, this::setInterval); } void setInterval(TimeValue interval) { @@ -191,6 +195,8 @@ public class MonitoringService extends AbstractLifecycleComponent { @Override protected void doRun() throws Exception { final long timestamp = System.currentTimeMillis(); + final long intervalInMillis = interval.getMillis(); + final ClusterState clusterState = clusterService.state(); final Collection results = new ArrayList<>(); for (Collector collector : collectors) { @@ -201,7 +207,7 @@ public class MonitoringService extends AbstractLifecycleComponent { } try { - Collection result = collector.collect(timestamp, interval.getMillis()); + Collection result = collector.collect(timestamp, intervalInMillis, clusterState); if (result != null) { results.addAll(result); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java index 7c8aacd04a2..b86aa071804 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/Collector.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.monitoring.collector; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.cluster.ClusterState; import 
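Condensed, the new collection round in MonitoringService.doRun() amounts to this (a restatement of the hunk above, not additional behavior):

    // One ClusterState snapshot is taken per round, so every collector sees the same view.
    final long timestamp = System.currentTimeMillis();
    final long intervalInMillis = interval.getMillis();
    final ClusterState clusterState = clusterService.state();
    for (Collector collector : collectors) {
        Collection<MonitoringDoc> result = collector.collect(timestamp, intervalInMillis, clusterState);
        if (result != null) {
            results.addAll(result);
        }
    }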
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -68,8 +69,10 @@ public abstract class Collector extends AbstractComponent { /** * Indicates if the current collector is allowed to collect data + * + * @param isElectedMaster true if the current local node is the elected master node */ - protected boolean shouldCollect() { + protected boolean shouldCollect(final boolean isElectedMaster) { if (licenseState.isMonitoringAllowed() == false) { logger.trace("collector [{}] can not collect data due to invalid license", name()); return false; @@ -77,15 +80,12 @@ public abstract class Collector extends AbstractComponent { return true; } - protected boolean isLocalNodeMaster() { - return clusterService.state().nodes().isLocalNodeElectedMaster(); - } - - public Collection collect(final long timestamp, final long interval) { + public Collection collect(final long timestamp, final long interval, final ClusterState clusterState) { try { - if (shouldCollect()) { + final boolean isElectedMaster = clusterState.getNodes().isLocalNodeElectedMaster(); + if (shouldCollect(isElectedMaster)) { logger.trace("collector [{}] - collecting data...", name()); - return doCollect(convertNode(timestamp, clusterService.localNode()), interval); + return doCollect(convertNode(timestamp, clusterService.localNode()), interval, clusterState); } } catch (ElasticsearchTimeoutException e) { logger.error((Supplier) () -> new ParameterizedMessage("collector [{}] timed out when collecting data", name())); @@ -95,11 +95,9 @@ public abstract class Collector extends AbstractComponent { return null; } - protected abstract Collection doCollect(MonitoringDoc.Node sourceNode, long interval) throws Exception; - - protected String clusterUUID() { - return clusterService.state().metaData().clusterUUID(); - } + protected abstract Collection doCollect(MonitoringDoc.Node node, + long interval, + ClusterState clusterState) throws Exception; /** * Returns a timestamp to use in {@link MonitoringDoc} @@ -110,6 +108,16 @@ public abstract class Collector extends AbstractComponent { return System.currentTimeMillis(); } + /** + * Extracts the current cluster's UUID from a {@link ClusterState} + * + * @param clusterState the {@link ClusterState} + * @return the cluster's UUID + */ + protected static String clusterUuid(final ClusterState clusterState) { + return clusterState.metaData().clusterUUID(); + } + /** * Returns the value of the collection timeout configured for the current {@link Collector}. 
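Putting the new hooks together, a concrete collector now looks roughly like this (a sketch against the signatures above; FooCollector and FooMonitoringDoc are placeholders, and the constructor is omitted):

    public class FooCollector extends Collector {

        @Override
        protected boolean shouldCollect(final boolean isElectedMaster) {
            // collect only on the elected master, and only if the license allows monitoring
            return isElectedMaster && super.shouldCollect(isElectedMaster);
        }

        @Override
        protected Collection<MonitoringDoc> doCollect(final MonitoringDoc.Node node,
                                                      final long interval,
                                                      final ClusterState clusterState) {
            // master-ness and the cluster UUID are derived from the same state snapshot
            final String clusterUuid = clusterUuid(clusterState);
            return Collections.singleton(new FooMonitoringDoc(clusterUuid, timestamp(), interval, node));
        }
    }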
* diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java index 523bcc72f72..04a28c93b47 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java @@ -81,13 +81,15 @@ public class ClusterStatsCollector extends Collector { } @Override - protected boolean shouldCollect() { + protected boolean shouldCollect(final boolean isElectedMaster) { // This collector can always collect data on the master node - return isLocalNodeMaster(); + return isElectedMaster; } @Override - protected Collection doCollect(final MonitoringDoc.Node node, final long interval) throws Exception { + protected Collection doCollect(final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { final Supplier clusterStatsSupplier = () -> client.admin().cluster().prepareClusterStats().get(getCollectionTimeout()); final Supplier> usageSupplier = @@ -96,8 +98,8 @@ public class ClusterStatsCollector extends Collector { final ClusterStatsResponse clusterStats = clusterStatsSupplier.get(); final String clusterName = clusterService.getClusterName().value(); + final String clusterUuid = clusterUuid(clusterState); final String version = Version.CURRENT.toString(); - final ClusterState clusterState = clusterService.state(); final License license = licenseService.getLicense(); final List xpackUsage = collect(usageSupplier); final boolean apmIndicesExist = doAPMIndicesExist(clusterState); @@ -108,7 +110,7 @@ public class ClusterStatsCollector extends Collector { // Adds a cluster stats document return Collections.singleton( - new ClusterStatsMonitoringDoc(clusterUUID(), timestamp(), interval, node, clusterName, version, clusterStats.getStatus(), + new ClusterStatsMonitoringDoc(clusterUuid, timestamp(), interval, node, clusterName, version, clusterStats.getStatus(), license, apmIndicesExist, xpackUsage, clusterStats, clusterState, clusterNeedsTLSEnabled)); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java index 34d681be839..06cebf7a857 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollector.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.monitoring.collector.indices; import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -59,12 +60,14 @@ public class IndexRecoveryCollector extends Collector { } @Override - protected boolean shouldCollect() { - return super.shouldCollect() && isLocalNodeMaster(); + protected boolean shouldCollect(final boolean isElectedMaster) { + return isElectedMaster && super.shouldCollect(isElectedMaster); } @Override - protected Collection doCollect(final MonitoringDoc.Node node, final long interval) throws 
Exception { + protected Collection doCollect(final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { List results = new ArrayList<>(1); RecoveryResponse recoveryResponse = client.admin().indices().prepareRecoveries() .setIndices(getCollectionIndices()) @@ -73,7 +76,8 @@ public class IndexRecoveryCollector extends Collector { .get(getCollectionTimeout()); if (recoveryResponse.hasRecoveries()) { - results.add(new IndexRecoveryMonitoringDoc(clusterUUID(), timestamp(), interval, node, recoveryResponse)); + final String clusterUuid = clusterUuid(clusterState); + results.add(new IndexRecoveryMonitoringDoc(clusterUuid, timestamp(), interval, node, recoveryResponse)); } return Collections.unmodifiableCollection(results); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java index e3ff99d4d19..e64a83dbf5d 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollector.java @@ -49,12 +49,14 @@ public class IndexStatsCollector extends Collector { } @Override - protected boolean shouldCollect() { - return super.shouldCollect() && isLocalNodeMaster(); + protected boolean shouldCollect(final boolean isElectedMaster) { + return isElectedMaster && super.shouldCollect(isElectedMaster); } @Override - protected Collection doCollect(final MonitoringDoc.Node node, final long interval) throws Exception { + protected Collection doCollect(final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { final List results = new ArrayList<>(); final IndicesStatsResponse indicesStats = client.admin().indices().prepareStats() .setIndices(getCollectionIndices()) @@ -73,8 +75,7 @@ public class IndexStatsCollector extends Collector { .get(getCollectionTimeout()); final long timestamp = timestamp(); - final String clusterUuid = clusterUUID(); - final ClusterState clusterState = clusterService.state(); + final String clusterUuid = clusterUuid(clusterState); // add the indices stats that we use to collect the index stats results.add(new IndicesStatsMonitoringDoc(clusterUuid, timestamp, interval, node, indicesStats)); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java index a5a2c35b905..bf570bd64e3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollector.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.monitoring.collector.ml; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; @@ -57,15 +58,18 @@ public class JobStatsCollector extends Collector { } @Override - protected boolean shouldCollect() { + protected boolean shouldCollect(final boolean isElectedMaster) { // This can only run when monitoring is allowed + ML is enabled/allowed, but also only on the elected master node - return super.shouldCollect() && - 
XPackSettings.MACHINE_LEARNING_ENABLED.get(settings) && licenseState.isMachineLearningAllowed() && - isLocalNodeMaster(); + return isElectedMaster + && super.shouldCollect(isElectedMaster) + && XPackSettings.MACHINE_LEARNING_ENABLED.get(settings) + && licenseState.isMachineLearningAllowed(); } @Override - protected List doCollect(final MonitoringDoc.Node node, final long interval) throws Exception { + protected List doCollect(final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { // fetch details about all jobs try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, MONITORING_ORIGIN)) { final GetJobsStatsAction.Response jobs = @@ -73,7 +77,7 @@ public class JobStatsCollector extends Collector { .actionGet(getCollectionTimeout()); final long timestamp = timestamp(); - final String clusterUuid = clusterUUID(); + final String clusterUuid = clusterUuid(clusterState); return jobs.getResponse().results().stream() .map(jobStats -> new JobStatsMonitoringDoc(clusterUuid, timestamp, interval, node, jobStats)) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java index c41905a72a6..c67a08fda5a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollector.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -59,12 +60,14 @@ public class NodeStatsCollector extends Collector { // For testing purpose @Override - protected boolean shouldCollect() { - return super.shouldCollect(); + protected boolean shouldCollect(final boolean isElectedMaster) { + return super.shouldCollect(isElectedMaster); } @Override - protected Collection doCollect(final MonitoringDoc.Node node, final long interval) throws Exception { + protected Collection doCollect(final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { NodesStatsRequest request = new NodesStatsRequest("_local"); request.indices(FLAGS); request.os(true); @@ -81,10 +84,11 @@ public class NodeStatsCollector extends Collector { throw response.failures().get(0); } + final String clusterUuid = clusterUuid(clusterState); final NodeStats nodeStats = response.getNodes().get(0); - return Collections.singletonList(new NodeStatsMonitoringDoc(clusterUUID(), nodeStats.getTimestamp(), interval, node, - node.getUUID(), isLocalNodeMaster(), nodeStats, BootstrapInfo.isMemoryLocked())); + return Collections.singletonList(new NodeStatsMonitoringDoc(clusterUuid, nodeStats.getTimestamp(), interval, node, + node.getUUID(), clusterState.getNodes().isLocalNodeElectedMaster(), nodeStats, BootstrapInfo.isMemoryLocked())); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java index 1257f4bdcc9..f1a23d46d2c 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollector.java @@ -38,21 +38,21 @@ public class ShardsCollector extends Collector { } @Override - protected boolean shouldCollect() { - return super.shouldCollect() && isLocalNodeMaster(); + protected boolean shouldCollect(final boolean isElectedMaster) { + return isElectedMaster && super.shouldCollect(isElectedMaster); } @Override - protected Collection doCollect(final MonitoringDoc.Node node, final long interval) throws Exception { + protected Collection doCollect(final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { final List results = new ArrayList<>(1); - - final ClusterState clusterState = clusterService.state(); if (clusterState != null) { RoutingTable routingTable = clusterState.routingTable(); if (routingTable != null) { List shards = routingTable.allShards(); if (shards != null) { - final String clusterUUID = clusterUUID(); + final String clusterUuid = clusterUuid(clusterState); final String stateUUID = clusterState.stateUUID(); final long timestamp = timestamp(); @@ -66,7 +66,7 @@ public class ShardsCollector extends Collector { // If the shard is assigned to a node, the shard monitoring document refers to this node shardNode = convertNode(node.getTimestamp(), clusterState.getNodes().get(shard.currentNodeId())); } - results.add(new ShardMonitoringDoc(clusterUUID, timestamp, interval, shardNode, shard, stateUUID)); + results.add(new ShardMonitoringDoc(clusterUuid, timestamp, interval, shardNode, shard, stateUUID)); } } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java index 25d33890e12..34d3591dc4c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/MonitoringServiceTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.monitoring; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -48,6 +49,7 @@ public class MonitoringServiceTests extends ESTestCase { final Monitoring monitoring = new Monitoring(Settings.EMPTY, licenseState); clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(monitoring.getSettings())); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(mock(ClusterState.class)); } @After @@ -59,7 +61,7 @@ public class MonitoringServiceTests extends ESTestCase { } public void testIsMonitoringActive() throws Exception { - monitoringService = new MonitoringService(Settings.EMPTY, clusterSettings, threadPool, emptySet(), new CountingExporter()); + monitoringService = new MonitoringService(Settings.EMPTY, clusterService, threadPool, emptySet(), new CountingExporter()); monitoringService.start(); assertBusy(() -> assertTrue(monitoringService.isStarted())); @@ -82,7 +84,7 @@ public class MonitoringServiceTests extends ESTestCase { Settings settings = Settings.builder().put(MonitoringService.INTERVAL.getKey(), TimeValue.MINUS_ONE).build(); CountingExporter exporter = new CountingExporter(); - monitoringService = new 
MonitoringService(settings, clusterSettings, threadPool, emptySet(), exporter); + monitoringService = new MonitoringService(settings, clusterService, threadPool, emptySet(), exporter); monitoringService.start(); assertBusy(() -> assertTrue(monitoringService.isStarted())); @@ -105,7 +107,7 @@ public class MonitoringServiceTests extends ESTestCase { final BlockingExporter exporter = new BlockingExporter(latch); Settings settings = Settings.builder().put(MonitoringService.INTERVAL.getKey(), MonitoringService.MIN_INTERVAL).build(); - monitoringService = new MonitoringService(settings, clusterSettings, threadPool, emptySet(), exporter); + monitoringService = new MonitoringService(settings, clusterService, threadPool, emptySet(), exporter); monitoringService.start(); assertBusy(() -> assertTrue(monitoringService.isStarted())); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java index 07e2cc7751f..8ae67bb8a3a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/BaseCollectorTestCase.java @@ -59,7 +59,7 @@ public abstract class BaseCollectorTestCase extends ESTestCase { protected void whenLocalNodeElectedMaster(final boolean electedMaster) { when(clusterService.state()).thenReturn(clusterState); - when(clusterState.nodes()).thenReturn(nodes); + when(clusterState.getNodes()).thenReturn(nodes); when(nodes.isLocalNodeElectedMaster()).thenReturn(electedMaster); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java index 929c8dd5e9e..9bfe97b1249 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java @@ -66,24 +66,17 @@ public class ClusterStatsCollectorTests extends BaseCollectorTestCase { } public void testShouldCollectReturnsFalseIfNotMaster() { - // this controls the blockage - whenLocalNodeElectedMaster(false); - final ClusterStatsCollector collector = new ClusterStatsCollector(Settings.EMPTY, clusterService, licenseState, client, licenseService); - assertThat(collector.shouldCollect(), is(false)); - verify(nodes).isLocalNodeElectedMaster(); + assertThat(collector.shouldCollect(false), is(false)); } public void testShouldCollectReturnsTrue() { - whenLocalNodeElectedMaster(true); - final ClusterStatsCollector collector = new ClusterStatsCollector(Settings.EMPTY, clusterService, licenseState, client, licenseService); - assertThat(collector.shouldCollect(), is(true)); - verify(nodes).isLocalNodeElectedMaster(); + assertThat(collector.shouldCollect(true), is(true)); } public void testDoAPMIndicesExistReturnsBasedOnIndices() { @@ -219,7 +212,7 @@ public class ClusterStatsCollectorTests extends BaseCollectorTestCase { final long interval = randomNonNegativeLong(); - final Collection results = collector.doCollect(node, interval); + final Collection results = collector.doCollect(node, interval, clusterState); assertEquals(1, results.size()); final MonitoringDoc monitoringDoc = results.iterator().next(); @@ -254,7 +247,8 @@ public class ClusterStatsCollectorTests extends BaseCollectorTestCase { 
assertThat(document.getClusterState().stateUUID(), equalTo(clusterState.stateUUID())); verify(clusterService, times(1)).getClusterName(); - verify(clusterService, times(2)).state(); + verify(clusterState, times(1)).metaData(); + verify(metaData, times(1)).clusterUUID(); verify(licenseService, times(1)).getLicense(); verify(clusterAdminClient).prepareClusterStats(); verify(client).execute(same(XPackUsageAction.INSTANCE), any(XPackUsageRequest.class)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java index afbc4660c92..ca09d88fd12 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java @@ -44,6 +44,7 @@ import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -52,35 +53,30 @@ public class IndexRecoveryCollectorTests extends BaseCollectorTestCase { public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { // this controls the blockage when(licenseState.isMonitoringAllowed()).thenReturn(false); - whenLocalNodeElectedMaster(randomBoolean()); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - verify(licenseState).isMonitoringAllowed(); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } } public void testShouldCollectReturnsFalseIfNotMaster() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - // this controls the blockage - whenLocalNodeElectedMaster(false); - final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - verify(licenseState).isMonitoringAllowed(); - verify(nodes).isLocalNodeElectedMaster(); + assertThat(collector.shouldCollect(false), is(false)); } public void testShouldCollectReturnsTrue() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - whenLocalNodeElectedMaster(true); - final IndexRecoveryCollector collector = new IndexRecoveryCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(true)); + assertThat(collector.shouldCollect(true), is(true)); verify(licenseState).isMonitoringAllowed(); - verify(nodes).isLocalNodeElectedMaster(); } public void testDoCollect() throws Exception { @@ -157,8 +153,12 @@ public class IndexRecoveryCollectorTests extends BaseCollectorTestCase { final long interval = randomNonNegativeLong(); - final Collection results = collector.doCollect(node, interval); + final Collection results = collector.doCollect(node, interval, clusterState); verify(indicesAdminClient).prepareRecoveries(); + if (recoveryStates.isEmpty() == false) { + verify(clusterState).metaData(); + verify(metaData).clusterUUID(); + } if (nbRecoveries == 0) { assertEquals(0, results.size()); diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java index f7528315d91..f61497798a5 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsCollectorTests.java @@ -36,6 +36,7 @@ import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -44,35 +45,30 @@ public class IndexStatsCollectorTests extends BaseCollectorTestCase { public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { // this controls the blockage when(licenseState.isMonitoringAllowed()).thenReturn(false); - whenLocalNodeElectedMaster(randomBoolean()); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - verify(licenseState).isMonitoringAllowed(); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } } public void testShouldCollectReturnsFalseIfNotMaster() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - // this controls the blockage - whenLocalNodeElectedMaster(false); - final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - verify(licenseState).isMonitoringAllowed(); - verify(nodes).isLocalNodeElectedMaster(); + assertThat(collector.shouldCollect(false), is(false)); } public void testShouldCollectReturnsTrue() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - whenLocalNodeElectedMaster(true); - final IndexStatsCollector collector = new IndexStatsCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(true)); + assertThat(collector.shouldCollect(true), is(true)); verify(licenseState).isMonitoringAllowed(); - verify(nodes).isLocalNodeElectedMaster(); } public void testDoCollect() throws Exception { @@ -133,8 +129,11 @@ public class IndexStatsCollectorTests extends BaseCollectorTestCase { final long interval = randomNonNegativeLong(); - final Collection results = collector.doCollect(node, interval); + final Collection results = collector.doCollect(node, interval, clusterState); verify(indicesAdminClient).prepareStats(); + verify(clusterState, times(1 + indices)).metaData(); + verify(clusterState, times(indices)).routingTable(); + verify(metaData).clusterUUID(); assertEquals(1 + indices, results.size()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java index 0909741ffd4..2948ed92f3a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/ml/JobStatsCollectorTests.java @@ -43,6 +43,8 @@ public class JobStatsCollectorTests 
extends BaseCollectorTestCase { public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { final Settings settings = randomFrom(mlEnabledSettings(), mlDisabledSettings()); final boolean mlAllowed = randomBoolean(); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); // this controls the blockage when(licenseState.isMonitoringAllowed()).thenReturn(false); @@ -50,9 +52,10 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { final JobStatsCollector collector = new JobStatsCollector(settings, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - - verify(licenseState).isMonitoringAllowed(); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } } public void testShouldCollectReturnsFalseIfNotMaster() { @@ -62,13 +65,11 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); when(licenseState.isMachineLearningAllowed()).thenReturn(randomBoolean()); // this controls the blockage - whenLocalNodeElectedMaster(false); + final boolean isElectedMaster = false; final JobStatsCollector collector = new JobStatsCollector(settings, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - - verify(licenseState).isMonitoringAllowed(); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); } public void testShouldCollectReturnsFalseIfMLIsDisabled() { @@ -77,13 +78,17 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); when(licenseState.isMachineLearningAllowed()).thenReturn(randomBoolean()); - whenLocalNodeElectedMaster(randomBoolean()); + + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); final JobStatsCollector collector = new JobStatsCollector(settings, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); - verify(licenseState).isMonitoringAllowed(); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } } public void testShouldCollectReturnsFalseIfMLIsNotAllowed() { @@ -92,13 +97,16 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); // this controls the blockage when(licenseState.isMachineLearningAllowed()).thenReturn(false); - whenLocalNodeElectedMaster(randomBoolean()); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); final JobStatsCollector collector = new JobStatsCollector(settings, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); - verify(licenseState).isMonitoringAllowed(); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } } public void testShouldCollectReturnsTrue() { @@ -106,18 +114,19 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { when(licenseState.isMonitoringAllowed()).thenReturn(true); when(licenseState.isMachineLearningAllowed()).thenReturn(true); - whenLocalNodeElectedMaster(true); + final boolean isElectedMaster = true; final JobStatsCollector collector = new JobStatsCollector(settings, clusterService,
licenseState, client); - assertThat(collector.shouldCollect(), is(true)); + assertThat(collector.shouldCollect(isElectedMaster), is(true)); verify(licenseState).isMonitoringAllowed(); } public void testDoCollect() throws Exception { - final MetaData metaData = mock(MetaData.class); final String clusterUuid = randomAlphaOfLength(5); + whenClusterStateWithUUID(clusterUuid); + final MonitoringDoc.Node node = randomMonitoringNode(random()); final MachineLearningClient client = mock(MachineLearningClient.class); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); @@ -125,10 +134,6 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); withCollectionTimeout(JobStatsCollector.JOB_STATS_TIMEOUT, timeout); - when(clusterService.state()).thenReturn(clusterState); - when(clusterState.metaData()).thenReturn(metaData); - when(metaData.clusterUUID()).thenReturn(clusterUuid); - final JobStatsCollector collector = new JobStatsCollector(Settings.EMPTY, clusterService, licenseState, client, threadContext); assertEquals(timeout, collector.getCollectionTimeout()); @@ -143,7 +148,9 @@ public class JobStatsCollectorTests extends BaseCollectorTestCase { final long interval = randomNonNegativeLong(); - final List monitoringDocs = collector.doCollect(node, interval); + final List monitoringDocs = collector.doCollect(node, interval, clusterState); + verify(clusterState).metaData(); + verify(metaData).clusterUUID(); assertThat(monitoringDocs, hasSize(jobStats.size())); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java index b6024087a19..2c9449627a0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java @@ -40,21 +40,24 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase { public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { // this controls the blockage when(licenseState.isMonitoringAllowed()).thenReturn(false); - whenLocalNodeElectedMaster(randomBoolean()); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(false)); - verify(licenseState).isMonitoringAllowed(); + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } } public void testShouldCollectReturnsTrue() { when(licenseState.isMonitoringAllowed()).thenReturn(true); - whenLocalNodeElectedMaster(true); + final boolean isElectedMaster = true; final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client); - assertThat(collector.shouldCollect(), is(true)); + assertThat(collector.shouldCollect(isElectedMaster), is(true)); verify(licenseState).isMonitoringAllowed(); } @@ -77,7 +80,7 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase { assertEquals(timeout, collector.getCollectionTimeout()); final FailedNodeException e = expectThrows(FailedNodeException.class, () -> - collector.doCollect(randomMonitoringNode(random()), randomNonNegativeLong())); 
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java
index b6024087a19..2c9449627a0 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsCollectorTests.java
@@ -40,21 +40,24 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase {
     public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() {
         // this controls the blockage
         when(licenseState.isMonitoringAllowed()).thenReturn(false);
-        whenLocalNodeElectedMaster(randomBoolean());
+        final boolean isElectedMaster = randomBoolean();
+        whenLocalNodeElectedMaster(isElectedMaster);

         final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client);

-        assertThat(collector.shouldCollect(), is(false));
-        verify(licenseState).isMonitoringAllowed();
+        assertThat(collector.shouldCollect(isElectedMaster), is(false));
+        if (isElectedMaster) {
+            verify(licenseState).isMonitoringAllowed();
+        }
     }

     public void testShouldCollectReturnsTrue() {
         when(licenseState.isMonitoringAllowed()).thenReturn(true);
-        whenLocalNodeElectedMaster(true);
+        final boolean isElectedMaster = true;

         final NodeStatsCollector collector = new NodeStatsCollector(Settings.EMPTY, clusterService, licenseState, client);

-        assertThat(collector.shouldCollect(), is(true));
+        assertThat(collector.shouldCollect(isElectedMaster), is(true));
         verify(licenseState).isMonitoringAllowed();
     }

@@ -77,7 +80,7 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase {
         assertEquals(timeout, collector.getCollectionTimeout());

         final FailedNodeException e = expectThrows(FailedNodeException.class, () ->
-                collector.doCollect(randomMonitoringNode(random()), randomNonNegativeLong()));
+                collector.doCollect(randomMonitoringNode(random()), randomNonNegativeLong(), clusterState));

         assertEquals(exception, e);
     }

@@ -112,7 +115,10 @@ public class NodeStatsCollectorTests extends BaseCollectorTestCase {

         final long interval = randomNonNegativeLong();

-        final Collection<MonitoringDoc> results = collector.doCollect(node, interval);
+        final Collection<MonitoringDoc> results = collector.doCollect(node, interval, clusterState);
+        verify(clusterState).metaData();
+        verify(metaData).clusterUUID();
+
         assertEquals(1, results.size());

         final MonitoringDoc monitoringDoc = results.iterator().next();
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java
index 2b8642e0d94..b8753a82a1d 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/monitoring/collector/shards/ShardsCollectorTests.java
@@ -45,12 +45,15 @@ public class ShardsCollectorTests extends BaseCollectorTestCase {
     public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() {
         // this controls the blockage
         when(licenseState.isMonitoringAllowed()).thenReturn(false);
-        whenLocalNodeElectedMaster(randomBoolean());
+        final boolean isElectedMaster = randomBoolean();
+        whenLocalNodeElectedMaster(isElectedMaster);

         final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState);

-        assertThat(collector.shouldCollect(), is(false));
-        verify(licenseState).isMonitoringAllowed();
+        assertThat(collector.shouldCollect(isElectedMaster), is(false));
+        if (isElectedMaster) {
+            verify(licenseState).isMonitoringAllowed();
+        }
     }

     public void testShouldCollectReturnsFalseIfNotMaster() {
@@ -60,9 +63,7 @@ public class ShardsCollectorTests extends BaseCollectorTestCase {

         final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState);

-        assertThat(collector.shouldCollect(), is(false));
-        verify(licenseState).isMonitoringAllowed();
-        verify(nodes).isLocalNodeElectedMaster();
+        assertThat(collector.shouldCollect(false), is(false));
     }

     public void testShouldCollectReturnsTrue() {
@@ -71,20 +72,16 @@

         final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState);

-        assertThat(collector.shouldCollect(), is(true));
+        assertThat(collector.shouldCollect(true), is(true));
         verify(licenseState).isMonitoringAllowed();
-        verify(nodes).isLocalNodeElectedMaster();
     }

     public void testDoCollectWhenNoClusterState() throws Exception {
-        when(clusterService.state()).thenReturn(null);
-
         final ShardsCollector collector = new ShardsCollector(Settings.EMPTY, clusterService, licenseState);

-        final Collection<MonitoringDoc> results = collector.doCollect(randomMonitoringNode(random()), randomNonNegativeLong());
+        final Collection<MonitoringDoc> results = collector.doCollect(randomMonitoringNode(random()), randomNonNegativeLong(), null);

         assertThat(results, notNullValue());
         assertThat(results.size(), equalTo(0));
-        verify(clusterService).state();
     }

     public void testDoCollect() throws Exception {
@@ -114,7 +111,10 @@ public class ShardsCollectorTests extends BaseCollectorTestCase {

         final long interval = randomNonNegativeLong();

-        final Collection<MonitoringDoc> results = collector.doCollect(node, interval);
+        final Collection<MonitoringDoc> results = collector.doCollect(node, interval, clusterState);
+        verify(clusterState).metaData();
+        verify(metaData).clusterUUID();
+
         assertThat(results, notNullValue());
         assertThat(results.size(), equalTo((indices != NONE) ? routingTable.allShards().size() : 0));
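Taken together, the three test files above encode the same refactoring of the monitoring Collector contract: shouldCollect() now receives the elected-master flag from the caller, and doCollect() receives the ClusterState instead of pulling it from ClusterService. A minimal sketch of that contract, consistent with the verify(...) expectations in these tests; the class name, constructor, and package imports are assumptions, not the actual Collector source.

    import java.util.Collection;

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.license.XPackLicenseState;
    import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc;

    public abstract class CollectorContractSketch {

        protected final XPackLicenseState licenseState;

        protected CollectorContractSketch(final XPackLicenseState licenseState) {
            this.licenseState = licenseState;
        }

        // The caller resolves the master flag once; short-circuiting on it first is
        // why the tests only verify() the license lookup when isElectedMaster is true.
        protected boolean shouldCollect(final boolean isElectedMaster) {
            return isElectedMaster && licenseState.isMonitoringAllowed();
        }

        // The caller also passes in the ClusterState, so collectors no longer call
        // clusterService.state() themselves (hence the stubs removed above).
        protected abstract Collection<MonitoringDoc> doCollect(MonitoringDoc.Node node, long interval,
                                                               ClusterState clusterState) throws Exception;
    }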
From 92c40061c56693808cf46b8f9f88d55fd59df7b1 Mon Sep 17 00:00:00 2001
From: Dimitris Athanasiou
Date: Tue, 12 Dec 2017 15:12:33 +0000
Subject: [PATCH 09/10] [ML] Skip index health checks for remote indices on
 datafeed node sel (elastic/x-pack-elasticsearch#3301)

Upon selecting a node to run a datafeed we normally check that the data
indices exist and that their primaries are active. However, these checks
cannot be applied to indices that live on a remote cluster and are
accessed via cross-cluster search (CCS). This commit skips these checks
for remote indices. This removes the last obstacle for running CCS
datafeeds.

Relates elastic/x-pack-elasticsearch#1071

Original commit: elastic/x-pack-elasticsearch@092f44feeebffae3a54ebbf4581beb56f7e541a1
---
 .../xpack/ml/datafeed/DatafeedNodeSelector.java    | 10 ++++++++++
 .../ml/datafeed/DatafeedNodeSelectorTests.java     | 17 +++++++++++++++++
 2 files changed, 27 insertions(+)

diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java
index 49642092224..22c82c0a085 100644
--- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java
+++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelector.java
@@ -89,6 +89,12 @@ public class DatafeedNodeSelector {
     private AssignmentFailure verifyIndicesActive(DatafeedConfig datafeed) {
         List<String> indices = datafeed.getIndices();
         for (String index : indices) {
+
+            if (isRemoteIndex(index)) {
+                // We cannot verify remote indices
+                continue;
+            }
+
             String[] concreteIndices;
             String reason = "cannot start datafeed [" + datafeed.getId() + "] because index ["
                     + index + "] does not exist, is closed, or is still initializing.";
@@ -115,6 +121,10 @@ public class DatafeedNodeSelector {
         return null;
     }

+    private boolean isRemoteIndex(String index) {
+        return index.indexOf(':') != -1;
+    }
+
     private static class AssignmentFailure {
         private final String reason;
         private final boolean isCriticalForTaskCreation;
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java
index ede2aea6a15..672a2987d1d 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java
@@ -226,6 +226,23 @@ public class DatafeedNodeSelectorTests extends ESTestCase {
                 + "[cannot start datafeed [datafeed_id] because index [not_foo] does not exist, is closed, or is still initializing.]"));
     }

+    public void testRemoteIndex() {
+        MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
+        Job job = createScheduledJob("job_id").build(new Date());
+        mlMetadataBuilder.putJob(job, false);
+        mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")), null);
+        mlMetadata = mlMetadataBuilder.build();
+
+        PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
+        addJobTask(job.getId(), "node_id", JobState.OPENED, tasksBuilder);
+        tasks = tasksBuilder.build();
+
+        givenClusterState("foo", 1, 0);
+
+        PersistentTasksCustomMetaData.Assignment result = new DatafeedNodeSelector(clusterState, resolver, "datafeed_id").selectNode();
+        assertNotNull(result.getExecutorNode());
+    }
+
     public void testSelectNode_jobTaskStale() {
         MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder();
         Job job = createScheduledJob("job_id").build(new Date());
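The isRemoteIndex check added above relies on the cross-cluster search naming convention: a remote index is addressed as cluster_alias:index_name, so any name containing a colon belongs to another cluster and cannot be health-checked locally. A small, runnable illustration of the same test (the index names are hypothetical, not part of the patch):

    import java.util.Arrays;
    import java.util.List;

    public class RemoteIndexCheckDemo {

        // Same test as DatafeedNodeSelector.isRemoteIndex(): a ':' marks a CCS index name.
        static boolean isRemoteIndex(String index) {
            return index.indexOf(':') != -1;
        }

        public static void main(String[] args) {
            List<String> indices = Arrays.asList("logs-2017.12", "remote:logs-2017.12");
            for (String index : indices) {
                // Only local indices get the "exists and primaries active" check.
                System.out.println(index + " -> "
                        + (isRemoteIndex(index) ? "skip health check" : "verify locally"));
            }
        }
    }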
From 2ca729afc2f6f70c69226ec06f94a8e80c5b76b7 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Tue, 12 Dec 2017 11:05:51 -0500
Subject: [PATCH 10/10] Fix packaging tests after breaking up x-pack

The packaging tests were failing after x-pack was broken up into
multiple plugin jars. This commit makes them pass locally.

Original commit: elastic/x-pack-elasticsearch@af2a7866eb35bb2a372dc4475a4e21cac3bf998b
---
 .../resources/packaging/tests/10_basic.bats        |  2 +-
 .../packaging/tests/bootstrap_password.bash        |  2 +-
 .../resources/packaging/tests/certgen.bash         | 52 +++++++++----------
 .../resources/packaging/tests/keystore.bash        |  2 +-
 .../packaging/tests/setup_passwords.bash           |  3 +-
 .../test/resources/packaging/utils/xpack.bash      |  8 ++-
 6 files changed, 36 insertions(+), 33 deletions(-)

diff --git a/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats b/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats
index 853ec23d796..32f57ef3fb1 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats
+++ b/qa/vagrant/src/test/resources/packaging/tests/10_basic.bats
@@ -33,7 +33,7 @@ setup() {
     count=$(find . -type f -name 'x-pack*.zip' | wc -l)
     [ "$count" -eq 1 ]

-    install_and_check_plugin x pack x-pack-*.jar
+    install_xpack
 }

 @test "[X-PACK] verify x-pack installation" {
diff --git a/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash b/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash
index ee38077e9e4..03e7d3e44ea 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash
+++ b/qa/vagrant/src/test/resources/packaging/tests/bootstrap_password.bash
@@ -13,7 +13,7 @@ setup() {
         clean_before_test
         install

-        install_and_check_plugin x pack x-pack-*.jar
+        install_xpack
         verify_xpack_installation
     fi
 }
diff --git a/qa/vagrant/src/test/resources/packaging/tests/certgen.bash b/qa/vagrant/src/test/resources/packaging/tests/certgen.bash
index d41eb8a19e0..87b77f3572e 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/certgen.bash
+++ b/qa/vagrant/src/test/resources/packaging/tests/certgen.bash
@@ -38,7 +38,7 @@ if [[ "$BATS_TEST_FILENAME" =~ 60_tar_certgen.bats$ ]]; then
     DATA_USER=$DEFAULT_PACKAGE_USER
     DATA_HOME=$DEFAULT_PACKAGE_ESHOME
     DATA_UTILS=$DEFAULT_PACKAGE_UTILS
-    
+
     install_master_node() {
         install_node_using_archive
     }
@@ -65,7 +65,7 @@ else
    DATA_USER=$DEFAULT_ARCHIVE_USER
    DATA_HOME=$DEFAULT_ARCHIVE_ESHOME
    DATA_UTILS=$DEFAULT_ARCHIVE_UTILS
-    
+
    install_master_node() {
        install_node_using_package
    }
@@ -85,12 +85,12 @@ install_node_using_archive() {
    load $BATS_UTILS/tar.bash
    export ESHOME="$DEFAULT_ARCHIVE_ESHOME"
    export_elasticsearch_paths
-    
+
    install_archive
    verify_archive_installation

    export ESPLUGIN_COMMAND_USER=$DEFAULT_ARCHIVE_USER
-    install_and_check_plugin x pack x-pack-*.jar
+    install_xpack
    verify_xpack_installation
 }
@@ -99,7 +99,7 @@ start_node_using_archive() {
    load $BATS_UTILS/tar.bash
    export ESHOME="$DEFAULT_ARCHIVE_ESHOME"
    export_elasticsearch_paths
-    
+
    run sudo -u $DEFAULT_ARCHIVE_USER "$ESHOME/bin/elasticsearch" -d -p $ESHOME/elasticsearch.pid
    [ "$status" -eq "0" ] || {
        echo "Failed to start node using archive: $output"
@@ -112,12 +112,12 @@ install_node_using_package() {
    load $BATS_UTILS/packages.bash
    export ESHOME="$DEFAULT_PACKAGE_ESHOME"
    export_elasticsearch_paths
-    
+
    install_package
    verify_package_installation

    export ESPLUGIN_COMMAND_USER=$DEFAULT_PACKAGE_USER
-    install_and_check_plugin x pack x-pack-*.jar
+    install_xpack
    verify_xpack_installation
 }
@@ -126,7 +126,7 @@ start_node_using_package() {
    if is_systemd; then
        run systemctl daemon-reload
        [ "$status" -eq 0 ]
-        
+
        run sudo systemctl start elasticsearch.service
        [ "$status" -eq "0" ]
@@ -168,7 +168,7 @@ instances:
     ip:
       - "127.0.0.1"
   - name: "node-data"
-    ip: 
+    ip:
      - "127.0.0.1"
 EOF
 CREATE_INSTANCES_FILE
@@ -199,12 +199,12 @@ CREATE_INSTANCES_FILE
    load $MASTER_UTILS
    export ESHOME="$MASTER_HOME"
    export_elasticsearch_paths
-    
+
    certs="$ESCONFIG/x-pack/certs"
    if [[ -d "$certs" ]]; then
        sudo rm -rf "$certs"
    fi
-    
+
    run sudo -E -u $MASTER_USER "unzip" $certificates -d $certs
    [ "$status" -eq 0 ] || {
        echo "Failed to unzip certificates in $certs: $output"
@@ -213,11 +213,11 @@ CREATE_INSTANCES_FILE

    assert_file "$certs/ca/ca.key" f $MASTER_USER $MASTER_USER 644
    assert_file "$certs/ca/ca.crt" f $MASTER_USER $MASTER_USER 644
-    
+
    assert_file "$certs/node-master" d $MASTER_USER $MASTER_USER 755
    assert_file "$certs/node-master/node-master.key" f $MASTER_USER $MASTER_USER 644
    assert_file "$certs/node-master/node-master.crt" f $MASTER_USER $MASTER_USER 644
-    
+
    assert_file "$certs/node-data" d $MASTER_USER $MASTER_USER 755
    assert_file "$certs/node-data/node-data.key" f $MASTER_USER $MASTER_USER 644
    assert_file "$certs/node-data/node-data.crt" f $MASTER_USER $MASTER_USER 644
@@ -235,8 +235,8 @@
 node.master: true
 node.data: false
 discovery.zen.ping.unicast.hosts: ["127.0.0.1:9301"]
-xpack.ssl.key: $ESCONFIG/x-pack/certs/node-master/node-master.key 
-xpack.ssl.certificate: $ESCONFIG/x-pack/certs/node-master/node-master.crt 
+xpack.ssl.key: $ESCONFIG/x-pack/certs/node-master/node-master.key
+xpack.ssl.certificate: $ESCONFIG/x-pack/certs/node-master/node-master.crt
 xpack.ssl.certificate_authorities: ["$ESCONFIG/x-pack/certs/ca/ca.crt"]

 xpack.security.transport.ssl.enabled: true
@@ -274,7 +274,7 @@ MASTER_SETTINGS
    load $DATA_UTILS
    export ESHOME="$DATA_HOME"
    export_elasticsearch_paths
-    
+
    sudo chown $DATA_USER:$DATA_USER "$certificates"
    [ -f "$certificates" ] || {
        echo "Could not find certificates: $certificates"
@@ -285,7 +285,7 @@ MASTER_SETTINGS
    certs="$ESCONFIG/x-pack/certs"
    if [[ -d "$certs" ]]; then
        sudo rm -rf "$certs"
    fi
-    
+
    run sudo -E -u $DATA_USER "unzip" $certificates -d $certs
    [ "$status" -eq 0 ] || {
        echo "Failed to unzip certificates in $certs: $output"
@@ -295,11 +295,11 @@ MASTER_SETTINGS
    assert_file "$certs/ca" d $DATA_USER $DATA_USER
    assert_file "$certs/ca/ca.key" f $DATA_USER $DATA_USER 644
    assert_file "$certs/ca/ca.crt" f $DATA_USER $DATA_USER 644
-    
+
    assert_file "$certs/node-master" d $DATA_USER $DATA_USER
    assert_file "$certs/node-master/node-master.key" f $DATA_USER $DATA_USER 644
    assert_file "$certs/node-master/node-master.crt" f $DATA_USER $DATA_USER 644
-    
+
    assert_file "$certs/node-data" d $DATA_USER $DATA_USER
    assert_file "$certs/node-data/node-data.key" f $DATA_USER $DATA_USER 644
    assert_file "$certs/node-data/node-data.crt" f $DATA_USER $DATA_USER 644
@@ -317,8 +317,8 @@
 node.master: false
 node.data: true
 discovery.zen.ping.unicast.hosts: ["127.0.0.1:9300"]
-xpack.ssl.key: $ESCONFIG/x-pack/certs/node-data/node-data.key 
-xpack.ssl.certificate: $ESCONFIG/x-pack/certs/node-data/node-data.crt 
+xpack.ssl.key: $ESCONFIG/x-pack/certs/node-data/node-data.key
+xpack.ssl.certificate: $ESCONFIG/x-pack/certs/node-data/node-data.crt
 xpack.ssl.certificate_authorities: ["$ESCONFIG/x-pack/certs/ca/ca.crt"]

 xpack.security.transport.ssl.enabled: true
@@ -370,11 +370,11 @@ DATA_SETTINGS
    echo "$masterSettings" | grep '"http":{"type":"security4"}'
    echo "$masterSettings" | grep '"transport":{"ssl":{"enabled":"true"}'
    echo "$masterSettings" | grep '"transport":{"type":"security4"}'
-    
+
    load $DATA_UTILS
    export ESHOME="$DATA_HOME"
    export_elasticsearch_paths
-    
+
    dataSettings=$(curl -u "elastic:changeme" \
                        -H "Content-Type: application/json" \
                        --cacert "$ESCONFIG/x-pack/certs/ca/ca.crt" \
@@ -384,14 +384,12 @@ DATA_SETTINGS
    echo "$dataSettings" | grep '"http":{"type":"security4"}'
    echo "$dataSettings" | grep '"transport":{"ssl":{"enabled":"true"}'
    echo "$dataSettings" | grep '"transport":{"type":"security4"}'
-    
+
    testSearch=$(curl -u "elastic:changeme" \
                      -H "Content-Type: application/json" \
                      --cacert "$ESCONFIG/x-pack/certs/ca/ca.crt" \
                      -XGET "https://127.0.0.1:9200/_search?q=title:guide")
-    
+
    echo "$testSearch" | grep '"_index":"books"'
    echo "$testSearch" | grep '"_id":"0"'
 }
-
-
diff --git a/qa/vagrant/src/test/resources/packaging/tests/keystore.bash b/qa/vagrant/src/test/resources/packaging/tests/keystore.bash
index 25a2768edb7..a5f855f2f55 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/keystore.bash
+++ b/qa/vagrant/src/test/resources/packaging/tests/keystore.bash
@@ -45,6 +45,6 @@ fi
 }

 @test "[$GROUP] keystore exists after install" {
-    install_and_check_plugin x pack x-pack-*.jar
+    install_xpack
     verify_xpack_installation
 }
diff --git a/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash b/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash
index 49e8d0d9430..eac32fc04a0 100644
--- a/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash
+++ b/qa/vagrant/src/test/resources/packaging/tests/setup_passwords.bash
@@ -13,7 +13,7 @@ setup() {
         clean_before_test
         install

-        install_and_check_plugin x pack x-pack-*.jar
+        install_xpack
         verify_xpack_installation
     fi
 }
@@ -81,4 +81,3 @@ SETUP_AUTO

     stop_elasticsearch_service
 }
-
diff --git a/qa/vagrant/src/test/resources/packaging/utils/xpack.bash b/qa/vagrant/src/test/resources/packaging/utils/xpack.bash
index fefd5de14a6..afd407aa063 100644
--- a/qa/vagrant/src/test/resources/packaging/utils/xpack.bash
+++ b/qa/vagrant/src/test/resources/packaging/utils/xpack.bash
@@ -4,6 +4,12 @@
 # or more contributor license agreements. Licensed under the Elastic License;
 # you may not use this file except in compliance with the Elastic License.

+install_xpack() {
+    install_and_check_plugin x pack x-pack-core-*.jar x-pack-graph-*.jar x-pack-ml-*.jar \
+        x-pack-monitoring-*.jar x-pack-security-*.jar x-pack-watcher-*.jar
+}
+
+# Checks that X-Pack files are correctly installed
 verify_xpack_installation() {
     local user="$ESPLUGIN_COMMAND_USER"
     local group="$ESPLUGIN_COMMAND_USER"
@@ -47,4 +53,4 @@ wait_for_xpack() {
     for i in {1..30}; do
         echo "GET / HTTP/1.0" > /dev/tcp/$host/$port && break || sleep 1;
     done
-}
\ No newline at end of file
+}