From 2e0560528f67994492c21d28fd262b66dc660776 Mon Sep 17 00:00:00 2001 From: Dimitrios Athanasiou Date: Wed, 5 Jul 2017 13:30:25 +0100 Subject: [PATCH 01/11] [TEST] Fix MlBasicMultiNodeIT after changing flush response Relates elastic/x-pack-elasticsearch#1914 Original commit: elastic/x-pack-elasticsearch@5175bf64d97f08220699468953407253884a47bf --- .../xpack/ml/integration/MlBasicMultiNodeIT.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java b/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java index 8213a119e99..8703e625069 100644 --- a/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java +++ b/qa/ml-basic-multi-node/src/test/java/org/elasticsearch/xpack/ml/integration/MlBasicMultiNodeIT.java @@ -22,6 +22,8 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentType.JSON; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class MlBasicMultiNodeIT extends ESRestTestCase { @@ -77,7 +79,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush"); assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("flushed", true), responseEntityToMap(response)); + assertFlushResponse(response, true, 1403481600000L); response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close", Collections.singletonMap("timeout", "20s")); @@ -204,7 +206,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + 
"/_flush"); assertEquals(200, response.getStatusLine().getStatusCode()); - assertEquals(Collections.singletonMap("flushed", true), responseEntityToMap(response)); + assertFlushResponse(response, true, 1403481600000L); response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close", Collections.singletonMap("timeout", "20s")); @@ -315,4 +317,11 @@ public class MlBasicMultiNodeIT extends ESRestTestCase { return XContentHelper.convertToMap(JSON.xContent(), response.getEntity().getContent(), false); } + private static void assertFlushResponse(Response response, boolean expectedFlushed, long expectedLastFinalizedBucketEnd) + throws IOException { + Map asMap = responseEntityToMap(response); + assertThat(asMap.size(), equalTo(2)); + assertThat(asMap.get("flushed"), is(true)); + assertThat(asMap.get("last_finalized_bucket_end"), equalTo(expectedLastFinalizedBucketEnd)); + } } From c035adb568f3061769ab7a5495fbbe2c8e514be1 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Wed, 5 Jul 2017 09:47:58 -0400 Subject: [PATCH 02/11] Rename upgrade.* client methods to migration.* (elastic/x-pack-elasticsearch#1881) This makes client names consistent with REST APIs and makes it simplifies client development. 
Original commit: elastic/x-pack-elasticsearch@90913f485b070ada9f22ba4ec46c81b20c8ffb7e --- ...pgrade.info.json => xpack.migration.get_assistance.json} | 2 +- .../{xpack.upgrade.json => xpack.migration.upgrade.json} | 2 +- .../test/resources/rest-api-spec/test/upgrade/10_basic.yml | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) rename plugin/src/test/resources/rest-api-spec/api/{xpack.upgrade.info.json => xpack.migration.get_assistance.json} (97%) rename plugin/src/test/resources/rest-api-spec/api/{xpack.upgrade.json => xpack.migration.upgrade.json} (94%) diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.upgrade.info.json b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json similarity index 97% rename from plugin/src/test/resources/rest-api-spec/api/xpack.upgrade.info.json rename to plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json index 8cf2876b985..89cc5b2d497 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.upgrade.info.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json @@ -1,5 +1,5 @@ { - "xpack.upgrade.info": { + "xpack.migration.get_assistance": { "methods": [ "GET" ], "url": { "path": "/_xpack/migration/assistance", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.upgrade.json b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json similarity index 94% rename from plugin/src/test/resources/rest-api-spec/api/xpack.upgrade.json rename to plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json index 77f2d526d6f..a13977ab40f 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.upgrade.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json @@ -1,5 +1,5 @@ { - "xpack.upgrade": { + "xpack.migration.upgrade": { "methods": [ "POST" ], "url": { "path": "/_xpack/migration/upgrade/{index}", diff --git 
a/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml b/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml index 7a57fd12fc9..33719e676ca 100644 --- a/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml +++ b/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml @@ -37,14 +37,14 @@ setup: --- "Upgrade info - all": - do: - xpack.upgrade.info: { index: _all } + xpack.migration.get_assistance: { index: _all } - length: { indices: 0 } --- "Upgrade info - all, but treat test2 as kibana": - do: - xpack.upgrade.info: { index: _all, kibana_indices: test2 } + xpack.migration.get_assistance: { index: _all, kibana_indices: test2 } - length: { indices: 1 } - match: { indices.test2.action_required: "upgrade" } @@ -53,7 +53,7 @@ setup: "Upgrade test2 as kibana index": - do: - xpack.upgrade: { index: test2, kibana_indices: test2 } + xpack.migration.upgrade: { index: test2, kibana_indices: test2 } - match: { total: 1 } - match: { created: 1 } From b66560fa85e7e2a0fd4774207be9668c912ebf4c Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Fri, 23 Jun 2017 10:17:59 -0600 Subject: [PATCH 03/11] [TEST] Use better REST endpoints for testing watch actions This is the x-pack side of https://github.com/elastic/elasticsearch/pull/24437 It changes two things, for the disable tests, it uses a valid endpoint instead of a previously invalid endpoint that happened to return a 400 because the endpoint was bad, regardless of if watcher was disabled. The other change is to create the watches index by putting a watch using the correct API, rather than manually creating the index. This is because `RestHijackOperationAction` hijacks operations like this and stops accessing the endpoint in a regular manner. 
Original commit: elastic/x-pack-elasticsearch@3be78d9aea02080f5cd00d9b0bace78eec05616e --- .../watcher/WatcherPluginDisableTests.java | 2 +- .../test/watcher/get_watch/20_missing.yml | 36 ++++++++++++++++--- 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java index 4215bf1aaae..904cb51c1dd 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java @@ -70,7 +70,7 @@ public class WatcherPluginDisableTests extends ESIntegTestCase { public void testRestEndpoints() throws Exception { try { - getRestClient().performRequest("GET", "/_xpack/watcher"); + getRestClient().performRequest("GET", "/_xpack/watcher/watch/my-watch"); fail("request should have failed"); } catch(ResponseException e) { assertThat(e.getResponse().getStatusLine().getStatusCode(), is(HttpStatus.SC_BAD_REQUEST)); diff --git a/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/20_missing.yml b/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/20_missing.yml index 3083b8bbb92..ee4fd2e7e43 100644 --- a/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/20_missing.yml +++ b/plugin/src/test/resources/rest-api-spec/test/watcher/get_watch/20_missing.yml @@ -4,12 +4,38 @@ cluster.health: wait_for_status: yellow - # ensure index exists, but ignore if it does already - # this test should ensure the watch is missing, while the index is there + # ensure index exists by creating a different watch - do: - indices.create: - index: .watches - ignore: 400 + xpack.watcher.put_watch: + id: "other" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "payload": { + "send": "yes" + } + } + }, + "condition": { + 
"always": {} + }, + "actions": { + "test_index": { + "index": { + "index": "test", + "doc_type": "test2" + } + } + } + } - do: catch: missing From adc6fd5a0fea7e6fc7982cc47451bf32f2e61a86 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 6 Jul 2017 12:29:12 +0100 Subject: [PATCH 04/11] [ML] Parse model_memory_limit both as number and string with units (elastic/x-pack-elasticsearch#1921) This is the first step for elastic/x-pack-elasticsearch#1604. Original commit: elastic/x-pack-elasticsearch@70010a216d34e63a9c9b3edd5c9388b375e376fc --- .../xpack/ml/job/config/AnalysisLimits.java | 21 ++++- .../xpack/ml/job/messages/Messages.java | 1 + .../writer/AnalysisLimitsWriter.java | 9 +-- .../ml/job/config/AnalysisLimitsTests.java | 76 ++++++++++++++++++- .../writer/AnalysisLimitsWriterTests.java | 11 +-- .../rest-api-spec/test/ml/jobs_crud.yml | 40 ++++++++++ 6 files changed, 137 insertions(+), 21 deletions(-) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java index 9e7118c0a33..9fa105fa1f4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java @@ -10,9 +10,12 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.ml.job.messages.Messages; import 
org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -35,13 +38,21 @@ public class AnalysisLimits implements ToXContentObject, Writeable { "analysis_limits", a -> new AnalysisLimits((Long) a[0], (Long) a[1])); static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MODEL_MEMORY_LIMIT); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.longValue(); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); } /** + * The model memory limit in MiBs. * It is initialised to null. - * A value of null or 0 will result to the default being used. + * A value of null will result to the default being used. 
*/ private final Long modelMemoryLimit; @@ -52,12 +63,16 @@ public class AnalysisLimits implements ToXContentObject, Writeable { private final Long categorizationExamplesLimit; public AnalysisLimits(Long modelMemoryLimit, Long categorizationExamplesLimit) { - this.modelMemoryLimit = modelMemoryLimit; + if (modelMemoryLimit != null && modelMemoryLimit < 1) { + String msg = Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_TOO_LOW, modelMemoryLimit); + throw ExceptionsHelper.badRequestException(msg); + } if (categorizationExamplesLimit != null && categorizationExamplesLimit < 0) { String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, CATEGORIZATION_EXAMPLES_LIMIT, 0, categorizationExamplesLimit); throw ExceptionsHelper.badRequestException(msg); } + this.modelMemoryLimit = modelMemoryLimit; this.categorizationExamplesLimit = categorizationExamplesLimit; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java index 7add56f64f1..9e8ada6a439 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java @@ -93,6 +93,7 @@ public final class Messages { "Invalid detector rule: at least one rule_condition is required"; public static final String JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION = "field_name cannot be used with function ''{0}''"; public static final String JOB_CONFIG_FIELD_VALUE_TOO_LOW = "{0} cannot be less than {1,number}. Value = {2,number}"; + public static final String JOB_CONFIG_MODEL_MEMORY_LIMIT_TOO_LOW = "model_memory_limit must be at least 1 MiB. 
Value = {0,number}"; public static final String JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED = "The ''{0}'' function cannot be used in jobs that will take pre-summarized input"; public static final String JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD = "by_field_name must be set when the ''{0}'' function is used"; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/AnalysisLimitsWriter.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/AnalysisLimitsWriter.java index 17d8d849b4a..021eb0baa57 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/AnalysisLimitsWriter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/writer/AnalysisLimitsWriter.java @@ -33,16 +33,13 @@ public class AnalysisLimitsWriter { public void write() throws IOException { StringBuilder contents = new StringBuilder(MEMORY_STANZA_STR).append(NEW_LINE); - if (limits.getModelMemoryLimit() != null && limits.getModelMemoryLimit() != 0L) { - contents.append(MODEL_MEMORY_LIMIT_CONFIG_STR + EQUALS) - .append(limits.getModelMemoryLimit()).append(NEW_LINE); + if (limits.getModelMemoryLimit() != null) { + contents.append(MODEL_MEMORY_LIMIT_CONFIG_STR + EQUALS).append(limits.getModelMemoryLimit()).append(NEW_LINE); } contents.append(RESULTS_STANZA_STR).append(NEW_LINE); if (limits.getCategorizationExamplesLimit() != null) { - contents.append(MAX_EXAMPLES_LIMIT_CONFIG_STR + EQUALS) - .append(limits.getCategorizationExamplesLimit()) - .append(NEW_LINE); + contents.append(MAX_EXAMPLES_LIMIT_CONFIG_STR + EQUALS).append(limits.getCategorizationExamplesLimit()).append(NEW_LINE); } writer.write(contents.toString()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java index 22aaad78d22..bab9c26e9eb 100644 --- 
a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java @@ -6,16 +6,26 @@ package org.elasticsearch.xpack.ml.job.config; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.ml.job.messages.Messages; +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + public class AnalysisLimitsTests extends AbstractSerializingTestCase { @Override protected AnalysisLimits createTestInstance() { - return new AnalysisLimits(randomBoolean() ? randomLong() : null, randomBoolean() ? randomNonNegativeLong() : null); + return new AnalysisLimits(randomBoolean() ? (long) randomIntBetween(1, 1000000) : null, + randomBoolean() ? randomNonNegativeLong() : null); } @Override @@ -28,6 +38,68 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase AnalysisLimits.PARSER.apply(parser, null)); + assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = -1")); + } + + public void testParseModelMemoryLimitGivenZero() throws IOException { + String json = "{\"model_memory_limit\": 0}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. 
Value = 0")); + } + + public void testParseModelMemoryLimitGivenPositiveNumber() throws IOException { + String json = "{\"model_memory_limit\": 2048}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + + AnalysisLimits limits = AnalysisLimits.PARSER.apply(parser, null); + + assertThat(limits.getModelMemoryLimit(), equalTo(2048L)); + } + + public void testParseModelMemoryLimitGivenNegativeString() throws IOException { + String json = "{\"model_memory_limit\":\"-4MB\"}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = -4")); + } + + public void testParseModelMemoryLimitGivenZeroString() throws IOException { + String json = "{\"model_memory_limit\":\"0MB\"}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0")); + } + + public void testParseModelMemoryLimitGivenLessThanOneMBString() throws IOException { + String json = "{\"model_memory_limit\":\"1000Kb\"}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. 
Value = 0")); + } + + public void testParseModelMemoryLimitGivenStringMultipleOfMBs() throws IOException { + String json = "{\"model_memory_limit\":\"4g\"}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + + AnalysisLimits limits = AnalysisLimits.PARSER.apply(parser, null); + + assertThat(limits.getModelMemoryLimit(), equalTo(4096L)); + } + + public void testParseModelMemoryLimitGivenStringNonMultipleOfMBs() throws IOException { + String json = "{\"model_memory_limit\":\"1300kb\"}"; + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); + + AnalysisLimits limits = AnalysisLimits.PARSER.apply(parser, null); + + assertThat(limits.getModelMemoryLimit(), equalTo(1L)); + } + public void testEquals_GivenEqual() { AnalysisLimits analysisLimits1 = new AnalysisLimits(10L, 20L); AnalysisLimits analysisLimits2 = new AnalysisLimits(10L, 20L); @@ -71,7 +143,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase + { + "analysis_config" : { + "detectors" :[{"function":"count"}] + }, + "data_description" : { + }, + "analysis_limits": { + "model_memory_limit": 2048 + } + } + - match: { job_id: "job-model-memory-limit-as-number" } + - match: { analysis_limits.model_memory_limit: 2048 } + +--- +"Test put job with model_memory_limit as string": + + - do: + xpack.ml.put_job: + job_id: job-model-memory-limit-as-string + body: > + { + "analysis_config" : { + "detectors" :[{"function":"count"}] + }, + "data_description" : { + }, + "analysis_limits": { + "model_memory_limit": "3g" + } + } + - match: { job_id: "job-model-memory-limit-as-string" } + - match: { analysis_limits.model_memory_limit: 3072 } + --- "Test get job API with non existing job id": - do: From 984d2ca2ba2ac487337636b39eeb7b705c7ae3b2 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 6 Jul 2017 13:36:11 +0100 Subject: [PATCH 05/11] [ML] Ignore unknown fields when 
parsing ML cluster state (elastic/x-pack-elasticsearch#1924) ML has two types of custom cluster state: 1. jobs 2. datafeeds These need to be parsed from JSON in two situations: 1. Create/update of the job/datafeed 2. Restoring cluster state on startup Previously we used exactly the same parser in both situations, but this severely limits our ability to add new features. This is because the parser was very strict. This was good when accepting create/update requests from users, but when restoring cluster state from disk it meant that we could not add new fields, as that would prevent reloading in mixed version clusters. This commit introduces a second parser that tolerates unknown fields for each object that is stored in cluster state. Then we use this more tolerant parser when parsing cluster state, but still use the strict parser when parsing REST requests. relates elastic/x-pack-elasticsearch#1732 Original commit: elastic/x-pack-elasticsearch@754e51d1ece29f321d2789dd0e9009edae994b58 --- .../xpack/ml/MachineLearning.java | 2 +- .../elasticsearch/xpack/ml/MlMetadata.java | 8 +- .../elasticsearch/xpack/ml/MlParserType.java | 19 ++++ .../xpack/ml/action/PutDatafeedAction.java | 2 +- .../xpack/ml/action/PutJobAction.java | 2 +- .../ml/action/ValidateDetectorAction.java | 2 +- .../ml/action/ValidateJobConfigAction.java | 2 +- .../xpack/ml/datafeed/ChunkingConfig.java | 42 +++++--- .../xpack/ml/datafeed/DatafeedConfig.java | 58 +++++----- .../xpack/ml/datafeed/DatafeedUpdate.java | 2 +- .../xpack/ml/job/config/AnalysisConfig.java | 54 ++++++---- .../xpack/ml/job/config/AnalysisLimits.java | 36 +++++-- .../xpack/ml/job/config/DataDescription.java | 26 +++-- .../xpack/ml/job/config/DetectionRule.java | 48 ++++++--- .../xpack/ml/job/config/Detector.java | 46 +++++--- .../xpack/ml/job/config/Job.java | 100 ++++++++++-------- .../xpack/ml/job/config/JobUpdate.java | 8 +- .../xpack/ml/job/config/ModelPlotConfig.java | 23 +++- .../xpack/ml/job/config/RuleCondition.java | 39 
++++--- .../xpack/ml/MlMetadataTests.java | 2 +- .../ml/datafeed/ChunkingConfigTests.java | 4 +- .../ml/datafeed/DatafeedConfigTests.java | 26 ++++- .../ml/job/config/AnalysisConfigTests.java | 2 +- .../ml/job/config/AnalysisLimitsTests.java | 18 ++-- .../ml/job/config/DataDescriptionTests.java | 8 +- .../ml/job/config/DetectionRuleTests.java | 2 +- .../xpack/ml/job/config/DetectorTests.java | 2 +- .../xpack/ml/job/config/JobBuilderTests.java | 2 +- .../xpack/ml/job/config/JobTests.java | 31 +++++- .../ml/job/config/ModelPlotConfigTests.java | 2 +- .../ml/job/config/RuleConditionTests.java | 2 +- 31 files changed, 416 insertions(+), 204 deletions(-) create mode 100644 plugin/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index f13cadedd23..9e2e2413886 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -233,7 +233,7 @@ public class MachineLearning implements ActionPlugin { return Arrays.asList( // Custom metadata new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField("ml"), - parser -> MlMetadata.ML_METADATA_PARSER.parse(parser, null).build()), + parser -> MlMetadata.METADATA_PARSER.parse(parser, null).build()), new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(PersistentTasksCustomMetaData.TYPE), PersistentTasksCustomMetaData::fromXContent), diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java index 1057b95b534..5f54f5cb910 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlMetadata.java @@ -53,11 +53,13 @@ public class MlMetadata implements MetaData.Custom { public static final String TYPE = 
"ml"; public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), Collections.emptySortedMap()); - public static final ObjectParser ML_METADATA_PARSER = new ObjectParser<>("ml_metadata", Builder::new); + // This parser follows the pattern that metadata is parsed leniently (to allow for enhancements) + public static final ObjectParser METADATA_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new); static { - ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD); - ML_METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, (p, c) -> DatafeedConfig.PARSER.apply(p, c).build(), DATAFEEDS_FIELD); + METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.METADATA_PARSER.apply(p, c).build(), JOBS_FIELD); + METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, + (p, c) -> DatafeedConfig.METADATA_PARSER.apply(p, c).build(), DATAFEEDS_FIELD); } private final SortedMap jobs; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java new file mode 100644 index 00000000000..8f33486a0b3 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/MlParserType.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml; + +/** + * In order to allow enhancements that require additions to the ML custom cluster state to be made in minor versions, + * when we parse our metadata from persisted cluster state we ignore unknown fields. However, we don't want to be + * lenient when parsing config as this would mean user mistakes could go undetected. 
Therefore, for all JSON objects + * that are used in both custom cluster state and config we have two parsers, one tolerant of unknown fields (for + * parsing cluster state) and one strict (for parsing config). This class enumerates the two options. + */ +public enum MlParserType { + + METADATA, CONFIG; + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java index e01028fa464..00982e5e797 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutDatafeedAction.java @@ -74,7 +74,7 @@ public class PutDatafeedAction extends Action implements ToXContent { public static Request parseRequest(String datafeedId, XContentParser parser) { - DatafeedConfig.Builder datafeed = DatafeedConfig.PARSER.apply(parser, null); + DatafeedConfig.Builder datafeed = DatafeedConfig.CONFIG_PARSER.apply(parser, null); datafeed.setId(datafeedId); return new Request(datafeed.build()); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java index a032d582148..cdbaf89d65e 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/PutJobAction.java @@ -64,7 +64,7 @@ public class PutJobAction extends Action implements ToXContent { public static Request parseRequest(String jobId, XContentParser parser) { - Job.Builder jobBuilder = Job.PARSER.apply(parser, null); + Job.Builder jobBuilder = Job.CONFIG_PARSER.apply(parser, null); if (jobBuilder.getId() == null) { jobBuilder.setId(jobId); } else if (!Strings.isNullOrEmpty(jobId) && !jobId.equals(jobBuilder.getId())) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java 
b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java index 464b380877a..fa109f37262 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/ValidateDetectorAction.java @@ -62,7 +62,7 @@ extends Action PARSER = new ConstructingObjectParser<>( - "chunking_config", a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ConstructingObjectParser METADATA_PARSER = new ConstructingObjectParser<>( + "chunking_config", true, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); + public static final ConstructingObjectParser CONFIG_PARSER = new ConstructingObjectParser<>( + "chunking_config", false, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); + public static final Map> PARSERS = + new EnumMap<>(MlParserType.class); static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return Mode.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODE_FIELD, ValueType.STRING); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, TIME_SPAN_FIELD, ValueType.STRING); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ConstructingObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareField(ConstructingObjectParser.constructorArg(), p 
-> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return Mode.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, MODE_FIELD, ValueType.STRING); + parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, TIME_SPAN_FIELD, ValueType.STRING); + } } private final Mode mode; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java index 8519575ed40..67d286e0c46 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedConfig.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -37,7 +38,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.EnumMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -74,35 +77,42 @@ public class DatafeedConfig extends AbstractDiffable implements public static final ParseField SOURCE = new ParseField("_source"); public static final ParseField 
CHUNKING_CONFIG = new ParseField("chunking_config"); - public static final ObjectParser PARSER = new ObjectParser<>("datafeed_config", Builder::new); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ObjectParser METADATA_PARSER = new ObjectParser<>("datafeed_config", true, Builder::new); + public static final ObjectParser CONFIG_PARSER = new ObjectParser<>("datafeed_config", false, Builder::new); + public static final Map> PARSERS = new EnumMap<>(MlParserType.class); static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setJobId, Job.ID); - PARSER.declareStringArray(Builder::setIndices, INDEXES); - PARSER.declareStringArray(Builder::setIndices, INDICES); - PARSER.declareStringArray(Builder::setTypes, TYPES); - PARSER.declareString((builder, val) -> - builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); - PARSER.declareString((builder, val) -> - builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY); - PARSER.declareObject(Builder::setQuery, - (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY); - PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), - AGGREGATIONS); - PARSER.declareObject(Builder::setAggregations,(p, c) -> AggregatorFactories.parseAggregators(p), AGGS); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareString(Builder::setId, ID); + parser.declareString(Builder::setJobId, Job.ID); + parser.declareStringArray(Builder::setIndices, INDEXES); + parser.declareStringArray(Builder::setIndices, INDICES); + 
parser.declareStringArray(Builder::setTypes, TYPES); + parser.declareString((builder, val) -> + builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); + parser.declareString((builder, val) -> + builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY); + parser.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY); + parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS); + parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGS); + parser.declareObject(Builder::setScriptFields, (p, c) -> { List parsedScriptFields = new ArrayList<>(); while (p.nextToken() != XContentParser.Token.END_OBJECT) { parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - return parsedScriptFields; - }, SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE); - // TODO this is to read former _source field. Remove in v7.0.0 - PARSER.declareBoolean((builder, value) -> {}, SOURCE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG); + } + parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); + return parsedScriptFields; + }, SCRIPT_FIELDS); + parser.declareInt(Builder::setScrollSize, SCROLL_SIZE); + // TODO this is to read former _source field. 
Remove in v7.0.0 + parser.declareBoolean((builder, value) -> {}, SOURCE); + parser.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSERS.get(parserType), CHUNKING_CONFIG); + } } private final String id; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java index 2d732c4a5dc..f95f55bfe43 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedUpdate.java @@ -63,7 +63,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject { return parsedScriptFields; }, DatafeedConfig.SCRIPT_FIELDS); PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG); + PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.CONFIG_PARSER, DatafeedConfig.CHUNKING_CONFIG); } private final String id; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java index 59bb1f84cd9..e352671bc00 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.time.TimeUtils; @@ -21,8 +22,10 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils; import 
java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.EnumMap; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedSet; @@ -70,27 +73,42 @@ public class AnalysisConfig implements ToXContentObject, Writeable { public static final long DEFAULT_RESULT_FINALIZATION_WINDOW = 2L; + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), a -> new AnalysisConfig.Builder((List) a[0])); + public static final ConstructingObjectParser METADATA_PARSER = + new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), true, + a -> new AnalysisConfig.Builder((List) a[0])); + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser CONFIG_PARSER = + new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), false, + a -> new AnalysisConfig.Builder((List) a[0])); + public static final Map> PARSERS = + new EnumMap<>(MlParserType.class); static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> Detector.PARSER.apply(p, c).build(), DETECTORS); - PARSER.declareString((builder, val) -> - builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN); - PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); - PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); - PARSER.declareString((builder, val) -> - builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); - PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); - PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS); - 
PARSER.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS); - PARSER.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW); - PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); - PARSER.declareStringArray((builder, values) -> builder.setMultipleBucketSpans( - values.stream().map(v -> TimeValue.parseTimeValue(v, MULTIPLE_BUCKET_SPANS.getPreferredName())) - .collect(Collectors.toList())), MULTIPLE_BUCKET_SPANS); - PARSER.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ConstructingObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareObjectArray(ConstructingObjectParser.constructorArg(), + (p, c) -> Detector.PARSERS.get(parserType).apply(p, c).build(), DETECTORS); + parser.declareString((builder, val) -> + builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN); + parser.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); + parser.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); + parser.declareString((builder, val) -> + builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); + parser.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); + parser.declareStringArray(Builder::setInfluencers, INFLUENCERS); + parser.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS); + parser.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW); + parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); + parser.declareStringArray((builder, values) -> builder.setMultipleBucketSpans( + values.stream().map(v -> TimeValue.parseTimeValue(v, 
MULTIPLE_BUCKET_SPANS.getPreferredName())) + .collect(Collectors.toList())), MULTIPLE_BUCKET_SPANS); + parser.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION); + } } /** diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java index 9fa105fa1f4..0cf3a22a7f9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java @@ -16,10 +16,13 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; +import java.util.EnumMap; +import java.util.Map; import java.util.Objects; /** @@ -34,19 +37,30 @@ public class AnalysisLimits implements ToXContentObject, Writeable { public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analysis_limits", a -> new AnalysisLimits((Long) a[0], (Long) a[1])); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ConstructingObjectParser METADATA_PARSER = new ConstructingObjectParser<>( + "analysis_limits", true, a -> new AnalysisLimits((Long) a[0], (Long) a[1])); + public static final ConstructingObjectParser CONFIG_PARSER = new ConstructingObjectParser<>( + "analysis_limits", false, 
a -> new AnalysisLimits((Long) a[0], (Long) a[1])); + public static final Map> PARSERS = + new EnumMap<>(MlParserType.class); static { - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ConstructingObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); + } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.longValue(); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); + parser.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); + } } /** diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java index d29063e8635..3093a874987 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DataDescription.java @@ -14,13 +14,16 @@ import 
org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.time.DateTimeFormatterTimestampConverter; import java.io.IOException; import java.time.ZoneOffset; +import java.util.EnumMap; import java.util.Locale; +import java.util.Map; import java.util.Objects; /** @@ -122,14 +125,25 @@ public class DataDescription implements ToXContentObject, Writeable { private final Character fieldDelimiter; private final Character quoteCharacter; - public static final ObjectParser PARSER = new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), Builder::new); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ObjectParser METADATA_PARSER = + new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), true, Builder::new); + public static final ObjectParser CONFIG_PARSER = + new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), false, Builder::new); + public static final Map> PARSERS = new EnumMap<>(MlParserType.class); static { - PARSER.declareString(Builder::setFormat, FORMAT_FIELD); - PARSER.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); - PARSER.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING); - PARSER.declareField(Builder::setQuoteCharacter, DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType 
: MlParserType.values()) { + ObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareString(Builder::setFormat, FORMAT_FIELD); + parser.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); + parser.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); + parser.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING); + parser.declareField(Builder::setQuoteCharacter, DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING); + } } public DataDescription(DataFormat dataFormat, String timeFieldName, String timeFormat, Character fieldDelimiter, diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java index ecc30050f86..8b244bc47c7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/DetectionRule.java @@ -15,13 +15,16 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.EnumMap; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -35,25 +38,36 @@ public class DetectionRule implements ToXContentObject, Writeable { public static final ParseField CONDITIONS_CONNECTIVE_FIELD = new ParseField("conditions_connective"); public static final ParseField RULE_CONDITIONS_FIELD = new ParseField("rule_conditions"); - public static 
final ObjectParser PARSER = new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), Builder::new); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ObjectParser METADATA_PARSER = + new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new); + public static final ObjectParser CONFIG_PARSER = + new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), false, Builder::new); + public static final Map> PARSERS = new EnumMap<>(MlParserType.class); static { - PARSER.declareField(Builder::setRuleAction, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return RuleAction.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, RULE_ACTION_FIELD, ValueType.STRING); - PARSER.declareString(Builder::setTargetFieldName, TARGET_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setTargetFieldValue, TARGET_FIELD_VALUE_FIELD); - PARSER.declareField(Builder::setConditionsConnective, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return Connective.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, CONDITIONS_CONNECTIVE_FIELD, ValueType.STRING); - PARSER.declareObjectArray(Builder::setRuleConditions, - (parser, parseFieldMatcher) -> RuleCondition.PARSER.apply(parser, parseFieldMatcher), RULE_CONDITIONS_FIELD); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareField(Builder::setRuleAction, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return RuleAction.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + 
"]"); + }, RULE_ACTION_FIELD, ValueType.STRING); + parser.declareString(Builder::setTargetFieldName, TARGET_FIELD_NAME_FIELD); + parser.declareString(Builder::setTargetFieldValue, TARGET_FIELD_VALUE_FIELD); + parser.declareField(Builder::setConditionsConnective, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return Connective.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, CONDITIONS_CONNECTIVE_FIELD, ValueType.STRING); + parser.declareObjectArray(Builder::setRuleConditions, (p, c) -> + RuleCondition.PARSERS.get(parserType).apply(p, c), RULE_CONDITIONS_FIELD); + } } private final RuleAction ruleAction; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java index 77120051e21..c167e75089a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Detector.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.process.autodetect.writer.RecordWriter; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -25,9 +26,11 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.EnumMap; import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; @@ -83,25 +86,34 @@ public class Detector implements ToXContentObject, Writeable { public static final 
ParseField DETECTOR_RULES_FIELD = new ParseField("detector_rules"); public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - public static final ObjectParser PARSER = new ObjectParser<>("detector", Builder::new); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ObjectParser METADATA_PARSER = new ObjectParser<>("detector", true, Builder::new); + public static final ObjectParser CONFIG_PARSER = new ObjectParser<>("detector", false, Builder::new); + public static final Map> PARSERS = new EnumMap<>(MlParserType.class); static { - PARSER.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); - PARSER.declareString(Builder::setFunction, FUNCTION_FIELD); - PARSER.declareString(Builder::setFieldName, FIELD_NAME_FIELD); - PARSER.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); - PARSER.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); - PARSER.declareField(Builder::setExcludeFrequent, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ExcludeFrequent.forString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING); - PARSER.declareObjectArray(Builder::setDetectorRules, - (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), DETECTOR_RULES_FIELD); - PARSER.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ObjectParser parser = PARSERS.get(parserType); + assert parser != null; + 
parser.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); + parser.declareString(Builder::setFunction, FUNCTION_FIELD); + parser.declareString(Builder::setFieldName, FIELD_NAME_FIELD); + parser.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); + parser.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); + parser.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); + parser.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); + parser.declareField(Builder::setExcludeFrequent, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ExcludeFrequent.forString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING); + parser.declareObjectArray(Builder::setDetectorRules, (p, c) -> + DetectionRule.PARSERS.get(parserType).apply(p, c).build(), DETECTOR_RULES_FIELD); + parser.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); + } } public static final String COUNT = "count"; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java index 85c9f907ad6..a68e8efcc6f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/Job.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; @@ -29,6 +30,7 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils; import 
java.io.IOException; import java.util.ArrayList; import java.util.Date; +import java.util.EnumMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -77,55 +79,65 @@ public class Job extends AbstractDiffable implements Writeable, ToXContentO public static final String ALL = "_all"; - public static final ObjectParser PARSER = new ObjectParser<>("job_details", Builder::new); + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ObjectParser METADATA_PARSER = new ObjectParser<>("job_details", true, Builder::new); + public static final ObjectParser CONFIG_PARSER = new ObjectParser<>("job_details", false, Builder::new); + public static final Map> PARSERS = new EnumMap<>(MlParserType.class); public static final int MAX_JOB_ID_LENGTH = 64; public static final TimeValue MIN_BACKGROUND_PERSIST_INTERVAL = TimeValue.timeValueHours(1); static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setJobType, JOB_TYPE); - PARSER.declareString(Builder::setJobVersion, JOB_VERSION); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareField(Builder::setCreateTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + CREATE_TIME.getPreferredName() + "]"); - }, CREATE_TIME, ValueType.VALUE); - PARSER.declareField(Builder::setFinishedTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + 
"]"); - }, FINISHED_TIME, ValueType.VALUE); - PARSER.declareField(Builder::setLastDataTime, p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(TimeUtils.dateStringToEpoch(p.text())); - } - throw new IllegalArgumentException( - "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]"); - }, LAST_DATA_TIME, ValueType.VALUE); - PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS); - PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG); - PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); - PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL); - PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); - PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - PARSER.declareBoolean(Builder::setDeleted, DELETED); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareString(Builder::setId, ID); + parser.declareString(Builder::setJobType, JOB_TYPE); + parser.declareString(Builder::setJobVersion, 
JOB_VERSION); + parser.declareStringOrNull(Builder::setDescription, DESCRIPTION); + parser.declareField(Builder::setCreateTime, p -> { + if (p.currentToken() == Token.VALUE_NUMBER) { + return new Date(p.longValue()); + } else if (p.currentToken() == Token.VALUE_STRING) { + return new Date(TimeUtils.dateStringToEpoch(p.text())); + } + throw new IllegalArgumentException("unexpected token [" + p.currentToken() + + "] for [" + CREATE_TIME.getPreferredName() + "]"); + }, CREATE_TIME, ValueType.VALUE); + parser.declareField(Builder::setFinishedTime, p -> { + if (p.currentToken() == Token.VALUE_NUMBER) { + return new Date(p.longValue()); + } else if (p.currentToken() == Token.VALUE_STRING) { + return new Date(TimeUtils.dateStringToEpoch(p.text())); + } + throw new IllegalArgumentException( + "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]"); + }, FINISHED_TIME, ValueType.VALUE); + parser.declareField(Builder::setLastDataTime, p -> { + if (p.currentToken() == Token.VALUE_NUMBER) { + return new Date(p.longValue()); + } else if (p.currentToken() == Token.VALUE_STRING) { + return new Date(TimeUtils.dateStringToEpoch(p.text())); + } + throw new IllegalArgumentException( + "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]"); + }, LAST_DATA_TIME, ValueType.VALUE); + parser.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSERS.get(parserType), ANALYSIS_CONFIG); + parser.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSERS.get(parserType), ANALYSIS_LIMITS); + parser.declareObject(Builder::setDataDescription, DataDescription.PARSERS.get(parserType), DATA_DESCRIPTION); + parser.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSERS.get(parserType), MODEL_PLOT_CONFIG); + parser.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); + parser.declareString((builder, val) -> builder.setBackgroundPersistInterval( + 
TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL); + parser.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); + parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); + parser.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); + parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); + parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); + parser.declareBoolean(Builder::setDeleted, DELETED); + } } private final String jobId; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java index e6e704e1150..e93f98fb4ab 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/JobUpdate.java @@ -27,14 +27,14 @@ public class JobUpdate implements Writeable, ToXContentObject { public static final ParseField DETECTORS = new ParseField("detectors"); public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "job_update", args -> new Builder((String) args[0])); + "job_update", args -> new Builder((String) args[0])); static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, Job.MODEL_PLOT_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS); + PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG); + PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.CONFIG_PARSER, Job.ANALYSIS_LIMITS); 
PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval( TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName())), Job.BACKGROUND_PERSIST_INTERVAL); PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); @@ -326,7 +326,7 @@ public class JobUpdate implements Writeable, ToXContentObject { PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX); PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION); PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), - (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), RULES); + (parser, parseFieldMatcher) -> DetectionRule.CONFIG_PARSER.apply(parser, parseFieldMatcher).build(), RULES); } private int detectorIndex; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java index f4ff8cf6ac8..39b50d30767 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfig.java @@ -12,8 +12,11 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.MlParserType; import java.io.IOException; +import java.util.EnumMap; +import java.util.Map; import java.util.Objects; public class ModelPlotConfig implements ToXContentObject, Writeable { @@ -22,13 +25,25 @@ public class ModelPlotConfig implements ToXContentObject, Writeable { private static final ParseField ENABLED_FIELD = new ParseField("enabled"); public static final ParseField TERMS_FIELD = new ParseField("terms"); - public static 
final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ConstructingObjectParser METADATA_PARSER = + new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), true, a -> new ModelPlotConfig((boolean) a[0], (String) a[1])); + public static final ConstructingObjectParser CONFIG_PARSER = + new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), false, + a -> new ModelPlotConfig((boolean) a[0], (String) a[1])); + public static final Map> PARSERS = + new EnumMap<>(MlParserType.class); static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ConstructingObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); + } } private final boolean enabled; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java index 764a6abecd6..437ff683cb1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/RuleCondition.java @@ -15,11 +15,14 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import 
org.elasticsearch.xpack.ml.MlParserType; import org.elasticsearch.xpack.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.utils.ExceptionsHelper; import java.io.IOException; +import java.util.EnumMap; import java.util.EnumSet; +import java.util.Map; import java.util.Objects; public class RuleCondition implements ToXContentObject, Writeable { @@ -29,21 +32,33 @@ public class RuleCondition implements ToXContentObject, Writeable { public static final ParseField FIELD_VALUE_FIELD = new ParseField("field_value"); public static final ParseField VALUE_FILTER_FIELD = new ParseField("value_filter"); - public static final ConstructingObjectParser PARSER = - new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), + // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly + public static final ConstructingObjectParser METADATA_PARSER = + new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), true, a -> new RuleCondition((RuleConditionType) a[0], (String) a[1], (String) a[2], (Condition) a[3], (String) a[4])); + public static final ConstructingObjectParser CONFIG_PARSER = + new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), false, + a -> new RuleCondition((RuleConditionType) a[0], (String) a[1], (String) a[2], (Condition) a[3], (String) a[4])); + public static final Map> PARSERS = + new EnumMap<>(MlParserType.class); static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return RuleConditionType.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, CONDITION_TYPE_FIELD, ValueType.STRING); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_NAME_FIELD); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_VALUE_FIELD); - 
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Condition.PARSER, Condition.CONDITION_FIELD); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), VALUE_FILTER_FIELD); + PARSERS.put(MlParserType.METADATA, METADATA_PARSER); + PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER); + for (MlParserType parserType : MlParserType.values()) { + ConstructingObjectParser parser = PARSERS.get(parserType); + assert parser != null; + parser.declareField(ConstructingObjectParser.constructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return RuleConditionType.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, CONDITION_TYPE_FIELD, ValueType.STRING); + parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_NAME_FIELD); + parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_VALUE_FIELD); + parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), Condition.PARSER, Condition.CONDITION_FIELD); + parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), VALUE_FILTER_FIELD); + } } private final RuleConditionType conditionType; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index 92065a552b2..53290314ccc 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -79,7 +79,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase { @Override protected MlMetadata doParseInstance(XContentParser parser) { - return MlMetadata.ML_METADATA_PARSER.apply(parser, null).build(); + return MlMetadata.METADATA_PARSER.apply(parser, null).build(); } @Override diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfigTests.java 
b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfigTests.java index ef6acf070e6..0cbe57cfec3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfigTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/datafeed/ChunkingConfigTests.java @@ -29,7 +29,7 @@ public class ChunkingConfigTests extends AbstractSerializingTestCase DatafeedConfig.CONFIG_PARSER.apply(parser, null).build()); + assertEquals("[datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); + } + + public void testFutureMetadataParse() throws IOException { + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_DATAFEED); + // Unlike the config version of this test, the metadata parser should tolerate the unknown future field + assertNotNull(DatafeedConfig.METADATA_PARSER.apply(parser, null).build()); } public void testCopyConstructor() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java index b1024bc3f4a..0973a1eaf27 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java @@ -87,7 +87,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase AnalysisLimits.PARSER.apply(parser, null)); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null)); assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. 
Value = -1")); } public void testParseModelMemoryLimitGivenZero() throws IOException { String json = "{\"model_memory_limit\": 0}"; XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); - ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null)); assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0")); } @@ -56,7 +56,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase AnalysisLimits.PARSER.apply(parser, null)); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null)); assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = -4")); } public void testParseModelMemoryLimitGivenZeroString() throws IOException { String json = "{\"model_memory_limit\":\"0MB\"}"; XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); - ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null)); assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. 
Value = 0")); } public void testParseModelMemoryLimitGivenLessThanOneMBString() throws IOException { String json = "{\"model_memory_limit\":\"1000Kb\"}"; XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json); - ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.PARSER.apply(parser, null)); + ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null)); assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0")); } @@ -86,7 +86,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase DataDescription.PARSER.apply(parser, null)); + () -> DataDescription.CONFIG_PARSER.apply(parser, null)); assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [format]")); Throwable cause = ex.getCause(); assertNotNull(cause); @@ -213,7 +213,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase DataDescription.PARSER.apply(parser, null)); + () -> DataDescription.CONFIG_PARSER.apply(parser, null)); assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [field_delimiter]")); Throwable cause = ex.getCause(); assertNotNull(cause); @@ -226,7 +226,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase DataDescription.PARSER.apply(parser, null)); + () -> DataDescription.CONFIG_PARSER.apply(parser, null)); assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [quote_character]")); Throwable cause = ex.getCause(); assertNotNull(cause); @@ -270,6 +270,6 @@ public class DataDescriptionTests extends AbstractSerializingTestCase { @Override protected Detector doParseInstance(XContentParser parser) { - return Detector.PARSER.apply(parser, null).build(); + return Detector.CONFIG_PARSER.apply(parser, null).build(); } public void 
testVerifyFieldNames_givenInvalidChars() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java index e57da1c1871..cd1e91d556c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobBuilderTests.java @@ -82,6 +82,6 @@ public class JobBuilderTests extends AbstractSerializingTestCase { @Override protected Job.Builder doParseInstance(XContentParser parser) { - return Job.PARSER.apply(parser, null); + return Job.CONFIG_PARSER.apply(parser, null); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java index 7c80e83a008..ee971febbea 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,6 +35,21 @@ import static org.hamcrest.Matchers.nullValue; public class JobTests extends AbstractSerializingTestCase { + private static final String FUTURE_JOB = "{\n" + + " \"job_id\": \"farequote\",\n" + + " \"create_time\": 1234567890000,\n" + + " \"tomorrows_technology_today\": \"wow\",\n" + + " \"analysis_config\": {\n" + + " \"bucket_span\": \"1h\",\n" + + " \"something_new\": \"gasp\",\n" + + " \"detectors\": [{\"function\": \"metric\", \"field_name\": \"responsetime\", 
\"by_field_name\": \"airline\"}]\n" + + " },\n" + + " \"data_description\": {\n" + + " \"time_field\": \"time\",\n" + + " \"the_future\": 123\n" + + " }\n" + + "}"; + @Override protected Job createTestInstance() { return createRandomizedJob(); @@ -46,7 +62,20 @@ public class JobTests extends AbstractSerializingTestCase { @Override protected Job doParseInstance(XContentParser parser) { - return Job.PARSER.apply(parser, null).build(); + return Job.CONFIG_PARSER.apply(parser, null).build(); + } + + public void testFutureConfigParse() throws IOException { + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_JOB); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> Job.CONFIG_PARSER.apply(parser, null).build()); + assertEquals("[job_details] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); + } + + public void testFutureMetadataParse() throws IOException { + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_JOB); + // Unlike the config version of this test, the metadata parser should tolerate the unknown future field + assertNotNull(Job.METADATA_PARSER.apply(parser, null).build()); } public void testConstructor_GivenEmptyJobConfiguration() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfigTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfigTests.java index a098035ae63..b85790669a2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfigTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/ModelPlotConfigTests.java @@ -31,6 +31,6 @@ public class ModelPlotConfigTests extends AbstractSerializingTestCase Date: Thu, 6 Jul 2017 15:40:19 +0100 Subject: [PATCH 06/11] [ML] Write model_memory_limit with units into the cluster state 
(elastic/x-pack-elasticsearch#1934) This is step 2 of elastic/x-pack-elasticsearch#1604 This change stores `model_memory_limit` as a string with `mb` unit. I considered using the `toString` method of `ByteSizeValue` but it can lead to accuracy loss. Adding the fixed `mb` unit maintains the accuracy, while making clear what unit the value is in. Original commit: elastic/x-pack-elasticsearch@4dc48f0ce8515bbde200042dd0df2756b495fef3 --- docs/en/rest-api/ml/update-job.asciidoc | 4 ++-- .../elasticsearch/xpack/ml/job/config/AnalysisLimits.java | 2 +- .../xpack/ml/job/config/AnalysisLimitsTests.java | 6 ++++++ .../elasticsearch/xpack/ml/job/config/JobBuilderTests.java | 3 +-- .../org/elasticsearch/xpack/ml/job/config/JobTests.java | 2 +- .../elasticsearch/xpack/ml/job/config/JobUpdateTests.java | 2 +- .../src/test/resources/rest-api-spec/test/ml/jobs_crud.yml | 6 +++--- 7 files changed, 15 insertions(+), 10 deletions(-) diff --git a/docs/en/rest-api/ml/update-job.asciidoc b/docs/en/rest-api/ml/update-job.asciidoc index fee35776cec..a356fbbeeb9 100644 --- a/docs/en/rest-api/ml/update-job.asciidoc +++ b/docs/en/rest-api/ml/update-job.asciidoc @@ -92,7 +92,7 @@ POST _xpack/ml/anomaly_detectors/it_ops_new_logs/_update "enabled": true }, "analysis_limits": { - "model_memory_limit": 1024 + "model_memory_limit": "1024mb" }, "renormalization_window_days": 30, "background_persist_interval": "2h", @@ -135,7 +135,7 @@ information, including the updated property values. 
For example: "influencers": [] }, "analysis_limits": { - "model_memory_limit": 1024 + "model_memory_limit": "1024mb" }, "data_description": { "time_field": "time", diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java index 0cf3a22a7f9..9fb159c3355 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimits.java @@ -126,7 +126,7 @@ public class AnalysisLimits implements ToXContentObject, Writeable { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit); + builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit + "mb"); } if (categorizationExamplesLimit != null) { builder.field(CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), categorizationExamplesLimit); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java index a09b3fdab59..876cd705da3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisLimitsTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.ml.job.config; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -24,6 +26,10 @@ 
public class AnalysisLimitsTests extends AbstractSerializingTestCase { builder.setAnalysisConfig(AnalysisConfigTests.createRandomized()); } if (randomBoolean()) { - builder.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(), - randomNonNegativeLong())); + builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized()); } if (randomBoolean()) { DataDescription.Builder dataDescription = new DataDescription.Builder(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java index ee971febbea..53c25fca109 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobTests.java @@ -508,7 +508,7 @@ public class JobTests extends AbstractSerializingTestCase { builder.setLastDataTime(new Date(randomNonNegativeLong())); } builder.setAnalysisConfig(AnalysisConfigTests.createRandomized()); - builder.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(), randomNonNegativeLong())); + builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized()); DataDescription.Builder dataDescription = new DataDescription.Builder(); dataDescription.setFormat(randomFrom(DataDescription.DataFormat.values())); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobUpdateTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobUpdateTests.java index e0ae23011ee..e261b3426a1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobUpdateTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/JobUpdateTests.java @@ -51,7 +51,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase { update.setModelPlotConfig(new ModelPlotConfig(randomBoolean(), randomAlphaOfLength(10))); } if (randomBoolean()) { - update.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(), randomNonNegativeLong())); + 
update.setAnalysisLimits(AnalysisLimitsTests.createRandomized()); } if (randomBoolean()) { update.setRenormalizationWindowDays(randomNonNegativeLong()); diff --git a/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml b/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml index 07342f5c066..1070cd335d7 100644 --- a/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -88,7 +88,7 @@ } } - match: { job_id: "job-model-memory-limit-as-number" } - - match: { analysis_limits.model_memory_limit: 2048 } + - match: { analysis_limits.model_memory_limit: "2048mb" } --- "Test put job with model_memory_limit as string": @@ -108,7 +108,7 @@ } } - match: { job_id: "job-model-memory-limit-as-string" } - - match: { analysis_limits.model_memory_limit: 3072 } + - match: { analysis_limits.model_memory_limit: "3072mb" } --- "Test get job API with non existing job id": @@ -302,7 +302,7 @@ - match: { description: "Post update description" } - match: { model_plot_config.enabled: false } - match: { model_plot_config.terms: "foobar" } - - match: { analysis_limits.model_memory_limit: 20 } + - match: { analysis_limits.model_memory_limit: "20mb" } - match: { analysis_config.categorization_filters: ["cat3.*"] } - match: { analysis_config.detectors.0.detector_rules.0.target_field_name: "airline" } - match: { analysis_config.detectors.0.detector_index: 0 } From d95c365e64880e4c514fde7637a75d9145d64542 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 6 Jul 2017 10:37:48 -0500 Subject: [PATCH 07/11] Loosen setup mode restrictions for upgrade tests (elastic/x-pack-elasticsearch#1927) This commit is related to elastic/x-pack-elasticsearch#1896. Currently setup mode means that the password must be set post 6.0 for using x-pack. This interferes with upgrade tests as setting the password fails without a properly upgraded security index. This commit loosens two aspects of the security. 1. 
The old default password will be accept in setup mode (requests from localhost). 2. All request types can be submitted in setup mode. Original commit: elastic/x-pack-elasticsearch@8a2a5770385ea6bf84a63352543479fa0abfd898 --- .../authc/esnative/ReservedRealm.java | 20 +++- .../security/authz/AuthorizationService.java | 11 ++- .../authz/AuthorizationServiceTests.java | 1 + qa/full-cluster-restart/build.gradle | 97 +----------------- .../xpack/restart/FullClusterRestartIT.java | 2 +- qa/rolling-upgrade/build.gradle | 98 +------------------ .../UpgradeClusterClientYamlTestSuiteIT.java | 2 +- .../WatchBackwardsCompatibilityIT.java | 2 +- 8 files changed, 37 insertions(+), 196 deletions(-) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index aae18d0976a..3b55ce42dbe 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -50,6 +50,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { public static final SecureString EMPTY_PASSWORD_TEXT = new SecureString("".toCharArray()); static final char[] EMPTY_PASSWORD_HASH = Hasher.BCRYPT.hash(EMPTY_PASSWORD_TEXT); + static final char[] OLD_DEFAULT_PASSWORD_HASH = Hasher.BCRYPT.hash(new SecureString("changeme".toCharArray())); private static final ReservedUserInfo DEFAULT_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, true, true); private static final ReservedUserInfo DISABLED_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, false, true); @@ -102,9 +103,20 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { Runnable action; if (userInfo != null) { try { - if (userInfo.hasEmptyPassword && isSetupMode(token.principal(), acceptEmptyPassword) == false) { - action = () -> 
listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", - token.principal())); + if (userInfo.hasEmptyPassword) { + // norelease + // Accepting the OLD_DEFAULT_PASSWORD_HASH is a transition step. We do not want to support + // this in a release. + if (isSetupMode(token.principal(), acceptEmptyPassword) == false) { + action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", + token.principal())); + } else if (verifyPassword(userInfo, token) + || Hasher.BCRYPT.verify(token.credentials(), OLD_DEFAULT_PASSWORD_HASH)) { + action = () -> listener.onResponse(getUser(token.principal(), userInfo)); + } else { + action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", + token.principal())); + } } else if (verifyPassword(userInfo, token)) { final User user = getUser(token.principal(), userInfo); action = () -> listener.onResponse(user); @@ -113,7 +125,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { token.principal())); } } finally { - if (userInfo.passwordHash != EMPTY_PASSWORD_HASH) { + if (userInfo.passwordHash != EMPTY_PASSWORD_HASH && userInfo.passwordHash != OLD_DEFAULT_PASSWORD_HASH) { Arrays.fill(userInfo.passwordHash, (char) 0); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 39a3c992007..10dfb8dc6ff 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -149,11 +149,14 @@ public class AuthorizationService extends AbstractComponent { throw denial(authentication, action, request); } + // norelease + // TODO: This functionality is disabled as it is not yet compatible with the upgrade process // If the user is the elastic user in setup mode, then only 
change password requests can be authorized - if (ElasticUser.isElasticUserInSetupMode(authentication.getUser()) - && ChangePasswordAction.NAME.equals(action) == false) { - throw denial(authentication, action, request); - } +// if (ElasticUser.isElasticUserInSetupMode(authentication.getUser()) +// && ChangePasswordAction.NAME.equals(action) == false +// && ClusterHealthAction.NAME.equals(action) == false) { +// throw denial(authentication, action, request); +// } // get the roles of the authenticated user, which may be different than the effective Role permission = userRole; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 89b4eaeae23..98e38474b43 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -349,6 +349,7 @@ public class AuthorizationServiceTests extends ESTestCase { verifyNoMoreInteractions(auditTrail); } + @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1217") public void testElasticUserOnlyAuthorizedForChangePasswordRequestsInSetupMode() { final User user = new ElasticUser(true, true); final ChangePasswordRequest changePasswordrequest = new ChangePasswordRequestBuilder(mock(Client.class)) diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index c326f0d9671..ff435e56fca 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -14,99 +14,13 @@ dependencies { testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts') } -Closure changePasswordAndWaitWithAuth = { NodeInfo node, AntBuilder ant -> - File tmpFile = new File(node.cwd, 'wait.success') - - String password - if (Version.fromString(node.nodeVersion).onOrAfter('6.0.0')) 
{ - password = "" - } else { - password = "changeme" - } - - for (int i = 0; i < 10; i++) { - HttpURLConnection httpURLConnection = null; - try { - httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_xpack/security/user/elastic/_password") - .openConnection(); - httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:${password}".getBytes(StandardCharsets.UTF_8))); - httpURLConnection.setRequestMethod("PUT"); - httpURLConnection.setDoOutput(true); - httpURLConnection.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); - - httpURLConnection.connect(); - OutputStream out = httpURLConnection.getOutputStream(); - out.write("{\"password\": \"x-pack-test-password\"}".getBytes(StandardCharsets.UTF_8)); - out.close() - - if (httpURLConnection.getResponseCode() == 200) { - break - } - - } catch (Exception e) { - httpURLConnection.disconnect() - if (i == 9) { - logger.error("final attempt to set password", e) - } else { - logger.debug("failed to set elastic password", e) - } - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - - // did not start, so wait a bit before trying again - Thread.sleep(500L); - } - - // wait up to twenty seconds - final long stopTime = System.currentTimeMillis() + 20000L; - Exception lastException = null; - - while (System.currentTimeMillis() < stopTime) { - lastException = null; - // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned - HttpURLConnection httpURLConnection = null; - try { - httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${node.config.numNodes}&wait_for_status=yellow").openConnection(); - httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); - 
httpURLConnection.setRequestMethod("GET"); - httpURLConnection.setConnectTimeout(1000); - httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes! - httpURLConnection.connect(); - if (httpURLConnection.getResponseCode() == 200) { - tmpFile.withWriter StandardCharsets.UTF_8.name(), { - it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name())) - } - break; - } - } catch (Exception e) { - logger.debug("failed to call cluster health", e) - lastException = e - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - - // did not start, so wait a bit before trying again - Thread.sleep(500L); - } - if (tmpFile.exists() == false && lastException != null) { - logger.error("final attempt of calling cluster health failed", lastException) - } - return tmpFile.exists() -} - - Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> File tmpFile = new File(node.cwd, 'wait.success') + // wait up to twenty seconds final long stopTime = System.currentTimeMillis() + 20000L; Exception lastException = null; + while (System.currentTimeMillis() < stopTime) { lastException = null; // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned @@ -114,7 +28,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> try { httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${node.config.numNodes}&wait_for_status=yellow").openConnection(); httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); + Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8))); httpURLConnection.setRequestMethod("GET"); httpURLConnection.setConnectTimeout(1000); httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes! 
@@ -202,7 +116,7 @@ subprojects { numBwcNodes = 2 numNodes = 2 clusterName = 'full-cluster-restart' - waitCondition = changePasswordAndWaitWithAuth + waitCondition = waitWithAuth setting 'xpack.security.transport.ssl.enabled', 'true' setting 'xpack.ssl.keystore.path', 'testnode.jks' setting 'xpack.ssl.keystore.password', 'testnode' @@ -277,8 +191,7 @@ subprojects { } } - // NORELEASE : this test must be unmuted once https://github.com/elastic/dev/issues/741 is completed -// check.dependsOn(integTest) + check.dependsOn(integTest) dependencies { testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime') diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 740ef381573..81a1b1aff0e 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -58,7 +58,7 @@ public class FullClusterRestartIT extends ESRestTestCase { @Override protected Settings restClientSettings() { - String token = "Basic " + Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); + String token = "Basic " + Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8)); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) // we increase the timeout here to 90 seconds to handle long waits for a green diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 5b1050b7018..55ed25ebdbc 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -23,6 +23,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> Exception lastException = null; while (System.currentTimeMillis() < stopTime) { + lastException = null; // we use 
custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned HttpURLConnection httpURLConnection = null; @@ -30,96 +31,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> // TODO this sucks having to hardcode number of nodes, but node.config.numNodes isn't necessarily accurate for rolling httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=2&wait_for_status=yellow").openConnection(); httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); - httpURLConnection.setRequestMethod("GET"); - httpURLConnection.setConnectTimeout(1000); - httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes! - httpURLConnection.connect(); - if (httpURLConnection.getResponseCode() == 200) { - tmpFile.withWriter StandardCharsets.UTF_8.name(), { - it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name())) - } - break; - } - } catch (Exception e) { - logger.debug("failed to call cluster health", e) - lastException = e - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - - // did not start, so wait a bit before trying again - Thread.sleep(500L); - } - if (tmpFile.exists() == false && lastException != null) { - logger.error("final attempt of calling cluster health failed", lastException) - } - return tmpFile.exists() -} - - -Closure changePasswordAndWaitWithAuth = { NodeInfo node, AntBuilder ant -> - File tmpFile = new File(node.cwd, 'wait.success') - - String password - if (Version.fromString(node.nodeVersion).onOrAfter('6.0.0')) { - password = "" - } else { - password = "changeme" - } - - for (int i = 0; i < 10; i++) { - HttpURLConnection httpURLConnection = null; - try { - httpURLConnection = (HttpURLConnection) new 
URL("http://${node.httpUri()}/_xpack/security/user/elastic/_password") - .openConnection(); - httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:${password}".getBytes(StandardCharsets.UTF_8))); - httpURLConnection.setRequestMethod("PUT"); - httpURLConnection.setDoOutput(true); - httpURLConnection.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); - - httpURLConnection.connect(); - OutputStream out = httpURLConnection.getOutputStream(); - out.write("{\"password\": \"x-pack-test-password\"}".getBytes(StandardCharsets.UTF_8)); - out.close() - - if (httpURLConnection.getResponseCode() == 200) { - break - } - - } catch (Exception e) { - httpURLConnection.disconnect() - if (i == 9) { - logger.error("final attempt to set password", e) - } else { - logger.debug("failed to set elastic password", e) - } - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - - // did not start, so wait a bit before trying again - Thread.sleep(500L); - } - - // wait up to twenty seconds - final long stopTime = System.currentTimeMillis() + 20000L; - Exception lastException = null; - - while (System.currentTimeMillis() < stopTime) { - lastException = null; - // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned - HttpURLConnection httpURLConnection = null; - try { - // TODO this sucks having to hardcode number of nodes, but node.config.numNodes isn't necessarily accurate for rolling - httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=2&wait_for_status=yellow").openConnection(); - httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); + Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8))); 
httpURLConnection.setRequestMethod("GET"); httpURLConnection.setConnectTimeout(1000); httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes! @@ -207,7 +119,7 @@ subprojects { numBwcNodes = 2 numNodes = 2 clusterName = 'rolling-upgrade' - waitCondition = changePasswordAndWaitWithAuth + waitCondition = waitWithAuth setting 'xpack.security.transport.ssl.enabled', 'true' setting 'xpack.ssl.keystore.path', 'testnode.jks' setting 'xpack.ssl.keystore.password', 'testnode' @@ -316,8 +228,8 @@ subprojects { dependsOn = ["v${wireCompatVersions[-1]}#bwcTest"] } } - // NORELEASE : this test must be unmuted once https://github.com/elastic/dev/issues/741 is completed -// check.dependsOn(integTest) + + check.dependsOn(integTest) dependencies { testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime') diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 5c520a60c79..d630e6efe5a 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -56,7 +56,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends SecurityClusterClientYa @Override protected Settings restClientSettings() { - String token = "Basic " + Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); + String token = "Basic " + Base64.getEncoder().encodeToString(("elastic:changeme").getBytes(StandardCharsets.UTF_8)); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) // we increase the timeout here to 90 seconds to handle long waits for a green diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java 
b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java index f7ab8850240..8da564f3ce7 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java @@ -112,7 +112,7 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { @Override protected Settings restClientSettings() { String token = "Basic " + Base64.getEncoder() - .encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); + .encodeToString(("elastic:changeme").getBytes(StandardCharsets.UTF_8)); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) .build(); From 0e322b525dc15ad325f9e1e5fa9b2d6ebd6c1ced Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 6 Jul 2017 11:06:03 -0700 Subject: [PATCH 08/11] [DOCS] Update doc links for security APIs (elastic/x-pack-elasticsearch#1883) Original commit: elastic/x-pack-elasticsearch@660410008c6cb43c5db687686419164a24db1a0c --- .../rest-api-spec/api/xpack.security.authenticate.json | 2 +- .../rest-api-spec/api/xpack.security.change_password.json | 2 +- .../rest-api-spec/api/xpack.security.clear_cached_realms.json | 2 +- .../rest-api-spec/api/xpack.security.clear_cached_roles.json | 2 +- .../resources/rest-api-spec/api/xpack.security.delete_role.json | 2 +- .../rest-api-spec/api/xpack.security.delete_role_mapping.json | 2 +- .../resources/rest-api-spec/api/xpack.security.delete_user.json | 2 +- .../rest-api-spec/api/xpack.security.disable_user.json | 2 +- .../resources/rest-api-spec/api/xpack.security.enable_user.json | 2 +- .../resources/rest-api-spec/api/xpack.security.get_role.json | 2 +- .../rest-api-spec/api/xpack.security.get_role_mapping.json | 2 +- .../resources/rest-api-spec/api/xpack.security.get_token.json | 2 +- .../resources/rest-api-spec/api/xpack.security.get_user.json | 2 +- 
.../rest-api-spec/api/xpack.security.invalidate_token.json | 2 +- .../resources/rest-api-spec/api/xpack.security.put_role.json | 2 +- .../rest-api-spec/api/xpack.security.put_role_mapping.json | 2 +- .../resources/rest-api-spec/api/xpack.security.put_user.json | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.authenticate.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.authenticate.json index 3db7084b342..650f89e89a4 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.authenticate.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.authenticate.json @@ -1,6 +1,6 @@ { "xpack.security.authenticate": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-authenticate.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-authenticate.html", "methods": [ "GET" ], "url": { "path": "/_xpack/security/_authenticate", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.change_password.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.change_password.json index b193284c1e3..7cb4277ee50 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.change_password.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.change_password.json @@ -1,6 +1,6 @@ { "xpack.security.change_password": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-change-password.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}/_password", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_realms.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_realms.json index 
21c6305304c..059441d654e 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_realms.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_realms.json @@ -1,6 +1,6 @@ { "xpack.security.clear_cached_realms": { - "documentation": "https://www.elastic.co/guide/en/x-pack/current/security-api-clear-cache.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-cache.html", "methods": [ "POST" ], "url": { "path": "/_xpack/security/realm/{realms}/_clear_cache", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json index 65426cdc29b..c94333325b1 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.clear_cached_roles.json @@ -1,6 +1,6 @@ { "xpack.security.clear_cached_roles": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-clear-role-cache", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-clear-role-cache", "methods": [ "POST" ], "url": { "path": "/_xpack/security/role/{name}/_clear_cache", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json index 365d3ba4a5c..4351b1bc847 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role.json @@ -1,6 +1,6 @@ { "xpack.security.delete_role": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-delete-role", + "documentation": 
"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-delete-role", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/role/{name}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role_mapping.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role_mapping.json index 6d6657bed46..26c72666e8f 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role_mapping.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_role_mapping.json @@ -1,6 +1,6 @@ { "xpack.security.delete_role_mapping": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-role-mapping.html#security-api-delete-role-mapping", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-role-mapping.html#security-api-delete-role-mapping", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/role_mapping/{name}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json index 4e6c1cc5370..d72c854a69d 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.delete_user.json @@ -1,6 +1,6 @@ { "xpack.security.delete_user": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-users.html#security-api-delete-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-delete-user", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/user/{username}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json index 75c1d26cd8a..3a72b314191 100644 --- 
a/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.disable_user.json @@ -1,6 +1,6 @@ { "xpack.security.disable_user": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-disable-user.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-disable-user", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}/_disable", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json index eaf40c09275..c68144957f0 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.enable_user.json @@ -1,6 +1,6 @@ { "xpack.security.enable_user": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-enable-user.html", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-enable-user", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}/_enable", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json index 20292019dcb..3479c911ccd 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role.json @@ -1,6 +1,6 @@ { "xpack.security.get_role": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-get-role", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-get-role", "methods": [ "GET" ], "url": { "path": 
"/_xpack/security/role/{name}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role_mapping.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role_mapping.json index e33183c96f3..0bdeb54cfb6 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role_mapping.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_role_mapping.json @@ -1,6 +1,6 @@ { "xpack.security.get_role_mapping": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-role-mapping.html#security-api-get-role-mapping", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-role-mapping.html#security-api-get-role-mapping", "methods": [ "GET" ], "url": { "path": "/_xpack/security/role_mapping/{name}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json index 07c060f56a8..8020d1ecd6d 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_token.json @@ -1,6 +1,6 @@ { "xpack.security.get_token": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-tokens.html#security-api-get-token", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-tokens.html#security-api-get-token", "methods": [ "POST" ], "url": { "path": "/_xpack/security/oauth2/token", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json index 8853275c19f..910fb7d0645 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.get_user.json @@ -1,6 +1,6 @@ { "xpack.security.get_user": { - "documentation": 
"https://www.elastic.co/guide/en/x-pack/master/security-api-users.html#security-api-get-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-get-user", "methods": [ "GET" ], "url": { "path": "/_xpack/security/user/{username}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json index 88ad6edbb5b..be032c2ffd0 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.invalidate_token.json @@ -1,6 +1,6 @@ { "xpack.security.invalidate_token": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-tokens.html#security-api-invalidate-token", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-tokens.html#security-api-invalidate-token", "methods": [ "DELETE" ], "url": { "path": "/_xpack/security/oauth2/token", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json index c2d51dc016a..4152975189e 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role.json @@ -1,6 +1,6 @@ { "xpack.security.put_role": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-put-role", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-put-role", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/role/{name}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role_mapping.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role_mapping.json index 
919e174f28e..3f92cd130ba 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role_mapping.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_role_mapping.json @@ -1,6 +1,6 @@ { "xpack.security.put_role_mapping": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-role-mapping.html#security-api-put-role-mapping", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-role-mapping.html#security-api-put-role-mapping", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/role_mapping/{name}", diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json b/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json index a589dd1e61d..de07498a409 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.security.put_user.json @@ -1,6 +1,6 @@ { "xpack.security.put_user": { - "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-users.html#security-api-put-user", + "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-put-user", "methods": [ "PUT", "POST" ], "url": { "path": "/_xpack/security/user/{username}", From b636dcc3667a499b80a86a315dd11691779ff65a Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 6 Jul 2017 15:37:17 -0400 Subject: [PATCH 09/11] Upgrade to Netty 4.1.13.Final This commit updates the SHAs here as core upgraded the Netty dependency from 4.1.11.Final to 4.1.13.Final. 
Relates elastic/x-pack-elasticsearch#1936 Original commit: elastic/x-pack-elasticsearch@d1803fc3311e691ba82b6dcf3f713ab246b06c35 --- plugin/licenses/netty-buffer-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-buffer-4.1.13.Final.jar.sha1 | 1 + plugin/licenses/netty-codec-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-codec-4.1.13.Final.jar.sha1 | 1 + plugin/licenses/netty-codec-http-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-codec-http-4.1.13.Final.jar.sha1 | 1 + plugin/licenses/netty-common-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-common-4.1.13.Final.jar.sha1 | 1 + plugin/licenses/netty-handler-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-handler-4.1.13.Final.jar.sha1 | 1 + plugin/licenses/netty-resolver-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-resolver-4.1.13.Final.jar.sha1 | 1 + plugin/licenses/netty-transport-4.1.11.Final.jar.sha1 | 1 - plugin/licenses/netty-transport-4.1.13.Final.jar.sha1 | 1 + 14 files changed, 7 insertions(+), 7 deletions(-) delete mode 100644 plugin/licenses/netty-buffer-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-buffer-4.1.13.Final.jar.sha1 delete mode 100644 plugin/licenses/netty-codec-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-codec-4.1.13.Final.jar.sha1 delete mode 100644 plugin/licenses/netty-codec-http-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-codec-http-4.1.13.Final.jar.sha1 delete mode 100644 plugin/licenses/netty-common-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-common-4.1.13.Final.jar.sha1 delete mode 100644 plugin/licenses/netty-handler-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-handler-4.1.13.Final.jar.sha1 delete mode 100644 plugin/licenses/netty-resolver-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-resolver-4.1.13.Final.jar.sha1 delete mode 100644 plugin/licenses/netty-transport-4.1.11.Final.jar.sha1 create mode 100644 plugin/licenses/netty-transport-4.1.13.Final.jar.sha1 
diff --git a/plugin/licenses/netty-buffer-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-buffer-4.1.11.Final.jar.sha1 deleted file mode 100644 index 5c7fd45c71b..00000000000 --- a/plugin/licenses/netty-buffer-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -84da342824017dcbeefda0becfef11ce2b5836da \ No newline at end of file diff --git a/plugin/licenses/netty-buffer-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-buffer-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..31f015e158a --- /dev/null +++ b/plugin/licenses/netty-buffer-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +0e3f583ea8a2618a7563b1ee2aa696c23edcc3d8 \ No newline at end of file diff --git a/plugin/licenses/netty-codec-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-codec-4.1.11.Final.jar.sha1 deleted file mode 100644 index 0d5093c08a2..00000000000 --- a/plugin/licenses/netty-codec-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d9ffe2192b567a4df052f6a36e7b7090b510e0cf \ No newline at end of file diff --git a/plugin/licenses/netty-codec-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-codec-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..6e7f1bdc14c --- /dev/null +++ b/plugin/licenses/netty-codec-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +370eeb6e9d92495a2a3be096ab6102755af76730 \ No newline at end of file diff --git a/plugin/licenses/netty-codec-http-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-codec-http-4.1.11.Final.jar.sha1 deleted file mode 100644 index ba7787f052d..00000000000 --- a/plugin/licenses/netty-codec-http-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3edeb0f08e455e570a55eb56bf64595fcb1a6b15 \ No newline at end of file diff --git a/plugin/licenses/netty-codec-http-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-codec-http-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..80d18918e1d --- /dev/null +++ b/plugin/licenses/netty-codec-http-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +0ee87368766e6b900cf6be8ac9cdce27156e9411 \ No newline at end of file diff --git 
a/plugin/licenses/netty-common-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-common-4.1.11.Final.jar.sha1 deleted file mode 100644 index 786f535a322..00000000000 --- a/plugin/licenses/netty-common-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f79a702bc5f275832ae18e33ba3d2a264a4aa728 \ No newline at end of file diff --git a/plugin/licenses/netty-common-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-common-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..044ec3ef4ed --- /dev/null +++ b/plugin/licenses/netty-common-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +f640e8cd8866527150784f8986152d3bba45b712 \ No newline at end of file diff --git a/plugin/licenses/netty-handler-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-handler-4.1.11.Final.jar.sha1 deleted file mode 100644 index 5a27bb52a82..00000000000 --- a/plugin/licenses/netty-handler-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6f43aae489b2e4fd7446cd347b077bb058a225d8 \ No newline at end of file diff --git a/plugin/licenses/netty-handler-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-handler-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..862f16a32a3 --- /dev/null +++ b/plugin/licenses/netty-handler-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +85847aa81a98d29948731befb4784d141046fa0e \ No newline at end of file diff --git a/plugin/licenses/netty-resolver-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-resolver-4.1.11.Final.jar.sha1 deleted file mode 100644 index 5fdf253a110..00000000000 --- a/plugin/licenses/netty-resolver-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3310d435f97ef9769dd5659dae3ef762ee3f0f57 \ No newline at end of file diff --git a/plugin/licenses/netty-resolver-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-resolver-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..7857ddac89c --- /dev/null +++ b/plugin/licenses/netty-resolver-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +d33ce420bd22c8a53246296ceb6e1ff08d31f8e1 \ No newline at end of file diff --git 
a/plugin/licenses/netty-transport-4.1.11.Final.jar.sha1 b/plugin/licenses/netty-transport-4.1.11.Final.jar.sha1 deleted file mode 100644 index 4c2de1a7a1a..00000000000 --- a/plugin/licenses/netty-transport-4.1.11.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6244fb27cbc24a8d006e9aaaead6b25dcf3aa2e1 \ No newline at end of file diff --git a/plugin/licenses/netty-transport-4.1.13.Final.jar.sha1 b/plugin/licenses/netty-transport-4.1.13.Final.jar.sha1 new file mode 100644 index 00000000000..dc86ce66f43 --- /dev/null +++ b/plugin/licenses/netty-transport-4.1.13.Final.jar.sha1 @@ -0,0 +1 @@ +5008406221a849a350ad2a8885f14ac330e038f3 \ No newline at end of file From c5012ac6e823e1f38928291ece83a6575b139ebe Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 7 Jul 2017 13:33:35 +1000 Subject: [PATCH 10/11] [DOC] Miscellaneous security doc updates (elastic/x-pack-elasticsearch#1908) - Document refresh interval for role mapping files - Fix obsolete shield reference in transport profile example - Clarify that AD & PKI don't support run_as - Fix logstash conf examples - Clarify interaction of SSL settings and PKI realm settings - Document PKI DN format, and recommend use of pki_dn metadata - Provide more details about action.auto_create_index during setup Original commit: elastic/x-pack-elasticsearch@49ddb12a7e8ce390948cac354d1b3bfc4ee9a9ae --- docs/en/installing-xes.asciidoc | 15 ++- .../authentication/pki-realm.asciidoc | 103 +++++++++++++----- .../authorization/mapping-roles.asciidoc | 6 + .../authorization/run-as-privilege.asciidoc | 6 +- .../separating-node-client-traffic.asciidoc | 2 +- .../logstash.asciidoc | 7 +- docs/en/settings/security-settings.asciidoc | 2 + 7 files changed, 108 insertions(+), 33 deletions(-) diff --git a/docs/en/installing-xes.asciidoc b/docs/en/installing-xes.asciidoc index e71bc947369..5faecd228ff 100644 --- a/docs/en/installing-xes.asciidoc +++ b/docs/en/installing-xes.asciidoc @@ -82,7 +82,10 @@ Continue with installation? 
[y/N]y ---------------------------------------------------------- -- -. If you have disabled automatic index creation in {es}, configure +. {xpack} will try to automatically create a number of indices within {es}. +By default, {es} is configured to allow automatic index creation, and no +additional steps are required. However, if you have disabled automatic index +creation in {es}, you must configure {ref}/docs-index_.html#index-creation[`action.auto_create_index`] in `elasticsearch.yml` to allow {xpack} to create the following indices: + @@ -92,6 +95,16 @@ Continue with installation? [y/N]y action.auto_create_index: .security,.monitoring*,.watches,.triggered_watches,.watcher-history*,.ml* ----------------------------------------------------------- -- ++ +[IMPORTANT] +============================================================================= +If you are using https://www.elastic.co/products/logstash[Logstash] +or https://www.elastic.co/products/beats[Beats] then you will most likely +require additional index names in your `action.auto_create_index` setting, and +the exact value will depend on your local configuration. If you are unsure of +the correct value for your environment, you may consider setting the value to + `*` which will allow automatic creation of all indices. +============================================================================= . Start {es}. + diff --git a/docs/en/security/authentication/pki-realm.asciidoc b/docs/en/security/authentication/pki-realm.asciidoc index 57c2a7435df..fbfae67ada1 100644 --- a/docs/en/security/authentication/pki-realm.asciidoc +++ b/docs/en/security/authentication/pki-realm.asciidoc @@ -65,11 +65,40 @@ xpack: username_pattern: "EMAILADDRESS=(.*?)(?:,|$)" ------------------------------------------------------------ + -You can also specify which truststore to use for authentication. 
This is useful -when the SSL/TLS layer trusts clients with certificates that are signed by a -different CA than the one that signs your users' certificates. To specify the -location of the truststore, specify the `truststore.path` option: -+ +. Restart Elasticsearch. + +[[pki-ssl-config]] +==== PKI and SSL Settings + +The PKI realm relies on the SSL settings of the node's network interface +(transport or http). The realm can be configured to be more restrictive than +the underlying network connection - that is, it is possible to configure the +node such that some connections are accepted by the network interface but then +fail to be authenticated by the PKI realm. However the reverse is not possible +- the PKI realm cannot authenticate a connection that has been refused by the +network interface. + +In particular this means: + +* The transport or http interface must request client certificates by setting + `client_authentication` to `optional` or `required`. +* The interface must _trust_ the certificate that is presented by the client + by configuring either the `truststore` or `certificate_authorities` paths, + or by setting `verification_mode` to `none`. +* The _protocols_ supported by the interface must be compatible with those + used by the client. + + +The relevant network interface (transport or http) must be configured to trust +any certificate that is to be used within the PKI realm. However it is possible to +configure the PKI realm to trust only a _subset_ of the certificates accepted +by the network interface. +This is useful when the SSL/TLS layer trusts clients with certificates that are +signed by a different CA than the one that signs your users' certificates.
+ +To configure the PKI realm with its own truststore, specify the +`truststore.path` option as below: + [source, yaml] ------------------------------------------------------------ xpack: @@ -83,35 +112,41 @@ xpack: password: "x-pack-test-password" ------------------------------------------------------------ -. Restart Elasticsearch. +The `certificate_authorities` option may be used as an alternative to the +`truststore.path` setting. + [[pki-settings]] ===== PKI Realm Settings [cols="4,^3,10"] |======================= -| Setting | Required | Description -| `type` | yes | Indicates the realm type. Must be set to `pki`. -| `order` | no | Indicates the priority of this realm within the realm - chain. Realms with a lower order are consulted first. - Although not required, we recommend explicitly - setting this value when you configure multiple realms. - Defaults to `Integer.MAX_VALUE`. -| `enabled` | no | Indicates whether this realm is enabled or disabled. - Enables you to disable a realm without removing its - configuration. Defaults to `true`. -| `username_pattern` | no | Specifies the regular expression pattern used to extract - the username from the certificate DN. The first match - group is used as the username. Defaults to `CN=(.*?)(?:,\|$)`. -| `truststore.path` | no | The path to the truststore. Defaults to the path - defined by {ref}/security-settings.html#ssl-tls-settings[SSL/TLS settings]. -| `truststore.password` | no/yes | Specifies the password for the truststore. Must be - provided if `truststore.path` is set. -| `truststore.algorithm` | no | Specifies the algorithm used for the truststore. - Defaults to `SunX509`. -| `files.role_mapping` | no | Specifies the <> - for the <>. - Defaults to `CONFIG_DIR/x-pack/role_mapping.yml`. +| Setting | Required | Description +| `type` | yes | Indicates the realm type. Must be set to `pki`. +| `order` | no | Indicates the priority of this realm within the realm + chain. Realms with a lower order are consulted first. 
+ Although not required, we recommend explicitly + setting this value when you configure multiple realms. + Defaults to `Integer.MAX_VALUE`. +| `enabled` | no | Indicates whether this realm is enabled or disabled. + Enables you to disable a realm without removing its + configuration. Defaults to `true`. +| `username_pattern` | no | Specifies the regular expression pattern used to extract + the username from the certificate DN. The first match + group is used as the username. Defaults to `CN=(.*?)(?:,\|$)`. +| `certificate_authorities` | no | List of paths to the PEM encoded certificate files + that should be trusted. + This setting may not be used with `truststore.path`. +| `truststore.path` | no | The path to the truststore. Defaults to the path + defined by {ref}/security-settings.html#ssl-tls-settings[SSL/TLS settings]. + This setting may not be used with `certificate_authorities`. +| `truststore.password` | no/yes | Specifies the password for the truststore. Must be + provided if `truststore.path` is set. +| `truststore.algorithm` | no | Specifies the algorithm used for the truststore. + Defaults to `SunX509`. +| `files.role_mapping` | no | Specifies the <> + for the <>. + Defaults to `CONFIG_DIR/x-pack/role_mapping.yml`. |======================= [[assigning-roles-pki]] @@ -151,4 +186,16 @@ user: <1> <1> The name of a role. <2> The distinguished name (DN) of a PKI user. +The distinguished name for a PKI user follows X.500 naming conventions which +place the most specific fields (like `cn` or `uid`) at the beginning of the +name, and the most general fields (like `o` or `dc`) at the end of the name. +Some tools, such as _openssl_, may print out the subject name in a different +format. + +One way that you can determine the correct DN for a certificate is to use the +{ref}/security-api-authenticate.html[authenticate API] (use the relevant PKI +certificate as the means of authentication) and inspect the metadata field in the result.
The user's distinguished name will be populated under the `pki_dn` +key. You can also use the authenticate API to validate your role mapping. + For more information, see <>. diff --git a/docs/en/security/authorization/mapping-roles.asciidoc b/docs/en/security/authorization/mapping-roles.asciidoc index 3106c16fb72..86cd76797ca 100644 --- a/docs/en/security/authorization/mapping-roles.asciidoc +++ b/docs/en/security/authorization/mapping-roles.asciidoc @@ -258,6 +258,12 @@ are values. The mappings can have a many-to-many relationship. When you map role to groups, the roles of a user in that group are the combination of the roles assigned to that group and the roles assigned to that user. +By default, {security} checks role mapping files for changes every 5 seconds. +You can change this default behavior by changing the +`resource.reload.interval.high` setting in the `elasticsearch.yml` file +(as this is a common setting in Elasticsearch, changing its value may affect +other schedules in the system). + ==== Realm Specific Details [float] [[ldap-role-mapping]] diff --git a/docs/en/security/authorization/run-as-privilege.asciidoc b/docs/en/security/authorization/run-as-privilege.asciidoc index fb9d159a786..0db5b53a9dd 100644 --- a/docs/en/security/authorization/run-as-privilege.asciidoc +++ b/docs/en/security/authorization/run-as-privilege.asciidoc @@ -8,8 +8,10 @@ users, you can use the _run as_ mechanism to restrict data access according to To "run as" (impersonate) another user, you must be able to retrieve the user from the realm you use to authenticate. Both the internal `native` and `file` realms -support this out of the box. The LDAP realm however must be configured to enable -user search. For more information, see <>. +support this out of the box. The LDAP realm however must be configured to run in +_user search_ mode. For more information, see +<>. +The Active Directory and PKI realms do not support "run as".
To submit requests on behalf of other users, you need to have the `run_as` permission. For example, the following role grants permission to submit request diff --git a/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc b/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc index 9df51383959..8afb089ce9c 100644 --- a/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc +++ b/docs/en/security/securing-communications/separating-node-client-traffic.asciidoc @@ -13,7 +13,7 @@ to `elasticsearch.yml`: -------------------------------------------------- transport.profiles.client: <1> port: 9500-9600 <2> - shield: + xpack.security: type: client <3> -------------------------------------------------- <1> `client` is the name of this example profile diff --git a/docs/en/security/tribe-clients-integrations/logstash.asciidoc b/docs/en/security/tribe-clients-integrations/logstash.asciidoc index 9cbfa30368c..5c5f064f332 100644 --- a/docs/en/security/tribe-clients-integrations/logstash.asciidoc +++ b/docs/en/security/tribe-clients-integrations/logstash.asciidoc @@ -73,22 +73,27 @@ plugins in your Logstash `.conf` file. For example: + [source,js] -------------------------------------------------- -input { +input { + elasticsearch { ... user => logstash_internal password => x-pack-test-password } +} filter { + elasticsearch { ... user => logstash_internal password => x-pack-test-password } +} output { elasticsearch { ... 
user => logstash_internal password => x-pack-test-password } +} -------------------------------------------------- [float] diff --git a/docs/en/settings/security-settings.asciidoc b/docs/en/settings/security-settings.asciidoc index 2cc4836cada..89a5ad62bdb 100644 --- a/docs/en/settings/security-settings.asciidoc +++ b/docs/en/settings/security-settings.asciidoc @@ -494,10 +494,12 @@ Defaults to `CN=(.*?)(?:,\|$)` `certificate_authorities`:: List of PEM certificate files that should be used to authenticate a user's certificate as trusted. Defaults to the trusted certificates configured for SSL. +See the {xpack-ref}/pki-realm.html#pki-ssl-config[SSL settings] section of the PKI realm documentation for more information. This setting may not be used with `truststore.path`. `truststore.path`:: The path of a truststore to use. Defaults to the trusted certificates configured for SSL. +See the {xpack-ref}/pki-realm.html#pki-ssl-config[SSL settings] section of the PKI realm documentation for more information. This setting may not be used with `certificate_authorities`. 
`truststore.password`:: From c87d9278a62ac08b0bfd42b8816f9f30388f5759 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 7 Jul 2017 09:41:34 +0200 Subject: [PATCH 11/11] Add validation for all `transport.profile.*` settings (elastic/x-pack-elasticsearch#1909) Follow-up from elasticsearch/elastic#25508 Original commit: elastic/x-pack-elasticsearch@fe08e74cccc1aa75c2c12bf2e14744f0b2f9ab1e --- .../xpack/security/Security.java | 4 + .../SecurityServerTransportInterceptor.java | 22 ++- .../security/transport/filter/IPFilter.java | 49 ++++-- .../netty4/SecurityNetty4Transport.java | 8 +- .../xpack/ssl/SSLConfigurationSettings.java | 166 ++++++++++++++---- .../elasticsearch/xpack/ssl/SSLService.java | 3 +- ...ServerTransportFilterIntegrationTests.java | 78 ++++++-- .../transport/filter/IPFilterTests.java | 25 ++- .../IpFilterRemoteAddressFilterTests.java | 4 +- .../netty4/SecurityNetty4TransportTests.java | 24 +-- 10 files changed, 295 insertions(+), 88 deletions(-) diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java index 85e7687d353..8029341f5c4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -154,6 +154,7 @@ import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServe import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4Transport; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.ssl.SSLBootstrapCheck; +import org.elasticsearch.xpack.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.ssl.SSLService; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -179,6 +180,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; +import static 
org.elasticsearch.common.settings.Setting.groupSetting; import static org.elasticsearch.xpack.XPackSettings.HTTP_SSL_ENABLED; public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { @@ -463,6 +465,8 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { settingsList.add(TokenService.TOKEN_PASSPHRASE); settingsList.add(TokenService.DELETE_INTERVAL); settingsList.add(TokenService.DELETE_TIMEOUT); + settingsList.add(SecurityServerTransportInterceptor.TRANSPORT_TYPE_PROFILE_SETTING); + settingsList.addAll(SSLConfigurationSettings.getProfileSettings()); // hide settings settingsList.add(Setting.listSetting(setting("hide_settings"), Collections.emptyList(), Function.identity(), diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index cab8de6adee..16eb8e202dc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -44,12 +45,24 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.Executor; +import java.util.function.Function; import static org.elasticsearch.xpack.security.Security.setting; public class SecurityServerTransportInterceptor extends AbstractComponent 
implements TransportInterceptor { - private static final String SETTING_NAME = "xpack.security.type"; + private static final Function> TRANSPORT_TYPE_SETTING_TEMPLATE = (key) -> new Setting<>(key, + "node", v + -> { + if (v.equals("node") || v.equals("client")) { + return v; + } + throw new IllegalArgumentException("type must be one of [client, node]"); + }, Setting.Property.NodeScope); + private static final String TRANSPORT_TYPE_SETTING_KEY = "xpack.security.type"; + + public static final Setting TRANSPORT_TYPE_PROFILE_SETTING = Setting.affixKeySetting("transport.profiles.", + TRANSPORT_TYPE_SETTING_KEY, TRANSPORT_TYPE_SETTING_TEMPLATE); private final AuthenticationService authcService; private final AuthorizationService authzService; @@ -154,17 +167,20 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem Settings profileSettings = entry.getValue(); final Settings profileSslSettings = SecurityNetty4Transport.profileSslSettings(profileSettings); final boolean extractClientCert = sslService.isSSLClientAuthEnabled(profileSslSettings, transportSSLSettings); - String type = entry.getValue().get(SETTING_NAME, "node"); + String type = TRANSPORT_TYPE_SETTING_TEMPLATE.apply(TRANSPORT_TYPE_SETTING_KEY).get(entry.getValue()); switch (type) { case "client": profileFilters.put(entry.getKey(), new ServerTransportFilter.ClientProfile(authcService, authzService, threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled, securityContext)); break; - default: + case "node": profileFilters.put(entry.getKey(), new ServerTransportFilter.NodeProfile(authcService, authzService, threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled, securityContext)); + break; + default: + throw new IllegalStateException("unknown profile type: " + type); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java 
b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java index 7441612532e..24ebac9e1d2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/filter/IPFilter.java @@ -23,12 +23,13 @@ import org.elasticsearch.xpack.security.audit.AuditTrailService; import java.net.InetSocketAddress; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; +import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.xpack.security.Security.setting; @@ -58,6 +59,13 @@ public class IPFilter { public static final Setting> TRANSPORT_FILTER_DENY_SETTING = Setting.listSetting(setting("transport.filter.deny"), Collections.emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope); + public static final Setting.AffixSetting> PROFILE_FILTER_DENY_SETTING = Setting.affixKeySetting("transport.profiles.", + "xpack.security.filter.deny", key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), + Property.Dynamic, Property.NodeScope)); + public static final Setting.AffixSetting> PROFILE_FILTER_ALLOW_SETTING = Setting.affixKeySetting("transport.profiles.", + "xpack.security.filter.allow", key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), + Property.Dynamic, Property.NodeScope)); + private static final Setting> HTTP_FILTER_ALLOW_FALLBACK = Setting.listSetting("transport.profiles.default.xpack.security.filter.allow", TRANSPORT_FILTER_ALLOW_SETTING, s -> s, Property.NodeScope); @@ -96,7 +104,7 @@ public class IPFilter { private volatile Map rules = Collections.emptyMap(); private volatile boolean isIpFilterEnabled; private volatile boolean isHttpFilterEnabled; - private volatile Map 
transportGroups; + private final Set profiles; private volatile List transportAllowFilter; private volatile List transportDenyFilter; private volatile List httpAllowFilter; @@ -104,6 +112,8 @@ public class IPFilter { private final SetOnce boundTransportAddress = new SetOnce<>(); private final SetOnce boundHttpTransportAddress = new SetOnce<>(); private final SetOnce> profileBoundAddress = new SetOnce<>(); + private final Map> profileAllowRules = Collections.synchronizedMap(new HashMap<>()); + private final Map> profileDenyRules = Collections.synchronizedMap(new HashMap<>()); public IPFilter(final Settings settings, AuditTrailService auditTrail, ClusterSettings clusterSettings, XPackLicenseState licenseState) { @@ -118,15 +128,22 @@ public class IPFilter { isHttpFilterEnabled = IP_FILTER_ENABLED_HTTP_SETTING.get(settings); isIpFilterEnabled = IP_FILTER_ENABLED_SETTING.get(settings); - this.transportGroups = TcpTransport.TRANSPORT_PROFILES_SETTING.get(settings).getAsGroups(); // this is pretty crazy that we - // allow this to be updateable!!! 
- we have to fix this very soon + this.profiles = settings.getGroups("transport.profiles.",true).keySet().stream().filter(k -> TcpTransport + .DEFAULT_PROFILE.equals(k) == false).collect(Collectors.toSet()); // exclude default profile -- it's handled differently + for (String profile : profiles) { + Setting> allowSetting = PROFILE_FILTER_ALLOW_SETTING.getConcreteSettingForNamespace(profile); + profileAllowRules.put(profile, allowSetting.get(settings)); + Setting> denySetting = PROFILE_FILTER_DENY_SETTING.getConcreteSettingForNamespace(profile); + profileDenyRules.put(profile, denySetting.get(settings)); + } clusterSettings.addSettingsUpdateConsumer(IP_FILTER_ENABLED_HTTP_SETTING, this::setHttpFiltering); clusterSettings.addSettingsUpdateConsumer(IP_FILTER_ENABLED_SETTING, this::setTransportFiltering); clusterSettings.addSettingsUpdateConsumer(TRANSPORT_FILTER_ALLOW_SETTING, this::setTransportAllowFilter); clusterSettings.addSettingsUpdateConsumer(TRANSPORT_FILTER_DENY_SETTING, this::setTransportDenyFilter); clusterSettings.addSettingsUpdateConsumer(HTTP_FILTER_ALLOW_SETTING, this::setHttpAllowFilter); clusterSettings.addSettingsUpdateConsumer(HTTP_FILTER_DENY_SETTING, this::setHttpDenyFilter); - clusterSettings.addSettingsUpdateConsumer(TcpTransport.TRANSPORT_PROFILES_SETTING, this::setTransportProfiles); + clusterSettings.addAffixUpdateConsumer(PROFILE_FILTER_ALLOW_SETTING, this::setProfileAllowRules, (a,b) -> {}); + clusterSettings.addAffixUpdateConsumer(PROFILE_FILTER_DENY_SETTING, this::setProfileDenyRules, (a,b) -> {}); updateRules(); } @@ -140,8 +157,13 @@ public class IPFilter { return map; } - private void setTransportProfiles(Settings settings) { - transportGroups = settings.getAsGroups(); + private void setProfileAllowRules(String profile, List rules) { + profileAllowRules.put(profile, rules); + updateRules(); + } + + private void setProfileDenyRules(String profile, List rules) { + profileDenyRules.put(profile, rules); updateRules(); } @@ -215,18 +237,17 
@@ public class IPFilter { if (isIpFilterEnabled && boundTransportAddress.get() != null) { TransportAddress[] localAddresses = boundTransportAddress.get().boundAddresses(); - profileRules.put("default", createRules(transportAllowFilter, transportDenyFilter, localAddresses)); - for (Map.Entry entry : transportGroups.entrySet()) { - String profile = entry.getKey(); + profileRules.put(TcpTransport.DEFAULT_PROFILE, createRules(transportAllowFilter, transportDenyFilter, localAddresses)); + for (String profile : profiles) { BoundTransportAddress profileBoundTransportAddress = profileBoundAddress.get().get(profile); if (profileBoundTransportAddress == null) { // this could happen if a user updates the settings dynamically with a new profile logger.warn("skipping ip filter rules for profile [{}] since the profile is not bound to any addresses", profile); continue; } - Settings profileSettings = entry.getValue().getByPrefix(setting("filter.")); - profileRules.put(profile, createRules(Arrays.asList(profileSettings.getAsArray("allow")), - Arrays.asList(profileSettings.getAsArray("deny")), profileBoundTransportAddress.boundAddresses())); + final List allowRules = this.profileAllowRules.getOrDefault(profile, Collections.emptyList()); + final List denyRules = this.profileDenyRules.getOrDefault(profile, Collections.emptyList()); + profileRules.put(profile, createRules(allowRules, denyRules, profileBoundTransportAddress.boundAddresses())); } } @@ -277,5 +298,7 @@ public class IPFilter { settings.add(HTTP_FILTER_DENY_SETTING); settings.add(TRANSPORT_FILTER_ALLOW_SETTING); settings.add(TRANSPORT_FILTER_DENY_SETTING); + settings.add(PROFILE_FILTER_ALLOW_SETTING); + settings.add(PROFILE_FILTER_DENY_SETTING); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java index 08918b6c2c5..444a5c58127 100644 --- 
a/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4Transport.java @@ -82,12 +82,12 @@ public class SecurityNetty4Transport extends Netty4Transport { } @Override - protected ChannelHandler getServerChannelInitializer(String name, Settings settings) { + protected ChannelHandler getServerChannelInitializer(String name) { SSLConfiguration configuration = profileConfiguration.get(name); if (configuration == null) { throw new IllegalStateException("unknown profile: " + name); } - return new SecurityServerChannelInitializer(settings, name, configuration); + return new SecurityServerChannelInitializer(name, configuration); } @Override @@ -130,8 +130,8 @@ public class SecurityNetty4Transport extends Netty4Transport { class SecurityServerChannelInitializer extends ServerChannelInitializer { private final SSLConfiguration configuration; - SecurityServerChannelInitializer(Settings settings, String name, SSLConfiguration configuration) { - super(name, settings); + SecurityServerChannelInitializer(String name, SSLConfiguration configuration) { + super(name); this.configuration = configuration; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java index c8ac6640291..492bda6bea1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java @@ -7,8 +7,9 @@ package org.elasticsearch.xpack.ssl; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; -import java.util.ArrayList; + import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Optional; @@ -50,6 +51,108 @@ public class SSLConfigurationSettings { private final 
List> allSettings; + private static final Function>> CIPHERS_SETTING_TEMPLATE = key -> Setting.listSetting(key, Collections + .emptyList(), Function.identity(), Property.NodeScope, Property.Filtered); + public static final Setting> CIPHERS_SETTING_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.cipher_suites", CIPHERS_SETTING_TEMPLATE); + + private static final Function>> SUPPORTED_PROTOCOLS_TEMPLATE = key -> Setting.listSetting(key, + Collections.emptyList(), Function.identity(), Property.NodeScope, Property.Filtered); + public static final Setting> SUPPORTED_PROTOCOLS_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.supported_protocols", SUPPORTED_PROTOCOLS_TEMPLATE) ; + + private static final Function>> KEYSTORE_PATH_TEMPLATE = key -> new Setting<>(key, s -> null, + Optional::ofNullable, Property.NodeScope, Property.Filtered); + public static final Setting> KEYSTORE_PATH_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.keystore.path", KEYSTORE_PATH_TEMPLATE); + + private static final Function> LEGACY_KEYSTORE_PASSWORD_TEMPLATE = key -> new Setting<>(key, "", + SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope); + public static final Setting LEGACY_KEYSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.keystore.password", LEGACY_KEYSTORE_PASSWORD_TEMPLATE); + + private static final Function> KEYSTORE_PASSWORD_TEMPLATE = key -> SecureSetting.secureString(key, + LEGACY_KEYSTORE_PASSWORD_TEMPLATE.apply(key.replace("keystore.secure_password", "keystore.password"))); + public static final Setting KEYSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.keystore.secure_password", KEYSTORE_PASSWORD_TEMPLATE); + + private static final Function> LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE = key -> new Setting<>(key, "", + SecureString::new, Property.Deprecated, 
Property.Filtered, Property.NodeScope); + public static final Setting LEGACY_KEYSTORE_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.keystore.key_password", LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE); + + private static final Function> KEYSTORE_KEY_PASSWORD_TEMPLATE = key -> + SecureSetting.secureString(key, LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE.apply(key.replace("keystore.secure_key_password", + "keystore.key_password"))); + public static final Setting KEYSTORE_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.keystore.secure_key_password", KEYSTORE_KEY_PASSWORD_TEMPLATE); + + private static final Function>> TRUST_STORE_PATH_TEMPLATE = key -> new Setting<>(key, s -> null, + Optional::ofNullable, Property.NodeScope, Property.Filtered); + public static final Setting> TRUST_STORE_PATH_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.truststore.path", TRUST_STORE_PATH_TEMPLATE); + + private static final Function>> KEY_PATH_TEMPLATE = key -> new Setting<>(key, s -> null, + Optional::ofNullable, Property.NodeScope, Property.Filtered); + public static final Setting> KEY_PATH_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.key", KEY_PATH_TEMPLATE); + + private static final Function> LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE = key -> + new Setting<>(key, "", SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope); + public static final Setting LEGACY_TRUSTSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.truststore.password", LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE); + + private static final Function> TRUSTSTORE_PASSWORD_TEMPLATE = key -> + SecureSetting.secureString(key, LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE.apply(key.replace("truststore.secure_password", + "truststore.password"))); + public static final Setting TRUSTSTORE_PASSWORD_PROFILES = 
Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.truststore.secure_password", TRUSTSTORE_PASSWORD_TEMPLATE); + + private static final Function> KEY_STORE_ALGORITHM_TEMPLATE = key -> + new Setting<>(key, s -> KeyManagerFactory.getDefaultAlgorithm(), + Function.identity(), Property.NodeScope, Property.Filtered); + public static final Setting KEY_STORE_ALGORITHM_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.keystore.algorithm", KEY_STORE_ALGORITHM_TEMPLATE); + + private static final Function> TRUST_STORE_ALGORITHM_TEMPLATE = key -> + new Setting<>(key, s -> TrustManagerFactory.getDefaultAlgorithm(), + Function.identity(), Property.NodeScope, Property.Filtered); + public static final Setting TRUST_STORE_ALGORITHM_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.truststore.algorithm", TRUST_STORE_ALGORITHM_TEMPLATE); + + private static final Function> LEGACY_KEY_PASSWORD_TEMPLATE = key -> new Setting<>(key, "", + SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope); + public static final Setting LEGACY_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.key_passphrase", LEGACY_KEY_PASSWORD_TEMPLATE); + + private static final Function> KEY_PASSWORD_TEMPLATE = key -> + SecureSetting.secureString(key, LEGACY_KEY_PASSWORD_TEMPLATE.apply(key.replace("secure_key_passphrase", + "key_passphrase"))); + public static final Setting KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.secure_key_passphrase", KEY_PASSWORD_TEMPLATE); + + private static final Function>> CERT_TEMPLATE = key -> new Setting<>(key, s -> null, + Optional::ofNullable, Property.NodeScope, Property.Filtered); + public static final Setting> CERT_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.certificate", CERT_TEMPLATE); + + private static final Function>> CAPATH_SETTING_TEMPLATE = key -> 
Setting.listSetting(key, Collections + .emptyList(), Function.identity(), Property.NodeScope, Property.Filtered); + public static final Setting> CAPATH_SETTING_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.certificate_authorities", CAPATH_SETTING_TEMPLATE); + + private static final Function>> CLIENT_AUTH_SETTING_TEMPLATE = + key -> new Setting<>(key, (String) null, s -> s == null ? Optional.empty() : Optional.of(SSLClientAuth.parse(s)), + Property.NodeScope, Property.Filtered); + public static final Setting> CLIENT_AUTH_SETTING_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.client_authentication", CLIENT_AUTH_SETTING_TEMPLATE); + + private static final Function>> VERIFICATION_MODE_SETTING_TEMPLATE = + key -> new Setting<>(key, (String) null, s -> s == null ? Optional.empty() : Optional.of(VerificationMode.parse(s)), + Property.NodeScope, Property.Filtered); + public static final Setting> VERIFICATION_MODE_SETTING_PROFILES = Setting.affixKeySetting( + "transport.profiles.", "xpack.security.ssl.verification_mode", VERIFICATION_MODE_SETTING_TEMPLATE); + /** * @see #withoutPrefix * @see #withPrefix @@ -58,38 +161,25 @@ public class SSLConfigurationSettings { */ private SSLConfigurationSettings(String prefix) { assert prefix != null : "Prefix cannot be null (but can be blank)"; - - ciphers = Setting.listSetting(prefix + "cipher_suites", Collections.emptyList(), Function.identity(), - Property.NodeScope, Property.Filtered); - supportedProtocols = Setting.listSetting(prefix + "supported_protocols", Collections.emptyList(), Function.identity(), - Property.NodeScope, Property.Filtered); - keystorePath = new Setting<>(prefix + "keystore.path", s -> null, Optional::ofNullable, - Property.NodeScope, Property.Filtered); - legacyKeystorePassword = new Setting<>(prefix + "keystore.password", "", SecureString::new, - Property.Deprecated, Property.Filtered, Property.NodeScope); - keystorePassword = 
SecureSetting.secureString(prefix + "keystore.secure_password", legacyKeystorePassword); - legacyKeystoreKeyPassword = new Setting<>(prefix + "keystore.key_password", "", - SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope); - keystoreKeyPassword = SecureSetting.secureString(prefix + "keystore.secure_key_password", legacyKeystoreKeyPassword); - truststorePath = new Setting<>(prefix + "truststore.path", s -> null, Optional::ofNullable, Property.NodeScope, Property.Filtered); - legacyTruststorePassword = new Setting<>(prefix + "truststore.password", "", SecureString::new, - Property.Deprecated, Property.Filtered, Property.NodeScope); - truststorePassword = SecureSetting.secureString(prefix + "truststore.secure_password", legacyTruststorePassword); - keystoreAlgorithm = new Setting<>(prefix + "keystore.algorithm", s -> KeyManagerFactory.getDefaultAlgorithm(), - Function.identity(), Property.NodeScope, Property.Filtered); - truststoreAlgorithm = new Setting<>(prefix + "truststore.algorithm", s -> TrustManagerFactory.getDefaultAlgorithm(), - Function.identity(), Property.NodeScope, Property.Filtered); - keyPath = new Setting<>(prefix + "key", s -> null, Optional::ofNullable, Setting.Property.NodeScope, Setting.Property.Filtered); - legacyKeyPassword = new Setting<>(prefix + "key_passphrase", "", SecureString::new, - Property.Deprecated, Property.Filtered, Property.NodeScope); - keyPassword = SecureSetting.secureString(prefix + "secure_key_passphrase", legacyKeyPassword); - cert =new Setting<>(prefix + "certificate", s -> null, Optional::ofNullable, Property.NodeScope, Property.Filtered); - caPaths = Setting.listSetting(prefix + "certificate_authorities", Collections.emptyList(), Function.identity(), - Property.NodeScope, Property.Filtered); - clientAuth = new Setting<>(prefix + "client_authentication", (String) null, - s -> s == null ? 
Optional.empty() : Optional.of(SSLClientAuth.parse(s)), Property.NodeScope, Property.Filtered); - verificationMode = new Setting<>(prefix + "verification_mode", (String) null, - s -> s == null ? Optional.empty() : Optional.of(VerificationMode.parse(s)), Property.NodeScope, Property.Filtered); + ciphers = CIPHERS_SETTING_TEMPLATE.apply(prefix + "cipher_suites"); + supportedProtocols = SUPPORTED_PROTOCOLS_TEMPLATE.apply(prefix + "supported_protocols"); + keystorePath = KEYSTORE_PATH_TEMPLATE.apply(prefix + "keystore.path"); + legacyKeystorePassword = LEGACY_KEYSTORE_PASSWORD_TEMPLATE.apply(prefix + "keystore.password"); + keystorePassword = KEYSTORE_PASSWORD_TEMPLATE.apply(prefix + "keystore.secure_password"); + legacyKeystoreKeyPassword = LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE.apply(prefix + "keystore.key_password"); + keystoreKeyPassword = KEYSTORE_KEY_PASSWORD_TEMPLATE.apply(prefix + "keystore.secure_key_password"); + truststorePath = TRUST_STORE_PATH_TEMPLATE.apply(prefix + "truststore.path"); + legacyTruststorePassword = LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE.apply(prefix + "truststore.password"); + truststorePassword = TRUSTSTORE_PASSWORD_TEMPLATE.apply(prefix + "truststore.secure_password"); + keystoreAlgorithm = KEY_STORE_ALGORITHM_TEMPLATE.apply(prefix + "keystore.algorithm"); + truststoreAlgorithm = TRUST_STORE_ALGORITHM_TEMPLATE.apply(prefix + "truststore.algorithm"); + keyPath = KEY_PATH_TEMPLATE.apply(prefix + "key"); + legacyKeyPassword = LEGACY_KEY_PASSWORD_TEMPLATE.apply(prefix + "key_passphrase"); + keyPassword = KEY_PASSWORD_TEMPLATE.apply(prefix + "secure_key_passphrase"); + cert = CERT_TEMPLATE.apply(prefix + "certificate"); + caPaths = CAPATH_SETTING_TEMPLATE.apply(prefix + "certificate_authorities"); + clientAuth = CLIENT_AUTH_SETTING_TEMPLATE.apply(prefix + "client_authentication"); + verificationMode = VERIFICATION_MODE_SETTING_TEMPLATE.apply(prefix + "verification_mode"); this.allSettings = Arrays.asList(ciphers, supportedProtocols, keystorePath, 
keystorePassword, keystoreAlgorithm, keystoreKeyPassword, truststorePath, truststorePassword, truststoreAlgorithm, keyPath, keyPassword, cert, caPaths, @@ -117,4 +207,14 @@ public class SSLConfigurationSettings { assert prefix.endsWith("ssl.") : "The ssl config prefix (" + prefix + ") should end in 'ssl.'"; return new SSLConfigurationSettings(prefix); } + + + public static Collection> getProfileSettings() { + return Arrays.asList(CIPHERS_SETTING_PROFILES, SUPPORTED_PROTOCOLS_PROFILES, KEYSTORE_PATH_PROFILES, + LEGACY_KEYSTORE_PASSWORD_PROFILES, KEYSTORE_PASSWORD_PROFILES, LEGACY_KEYSTORE_KEY_PASSWORD_PROFILES, + KEYSTORE_KEY_PASSWORD_PROFILES, TRUST_STORE_PATH_PROFILES, LEGACY_TRUSTSTORE_PASSWORD_PROFILES, + TRUSTSTORE_PASSWORD_PROFILES, KEY_STORE_ALGORITHM_PROFILES, TRUST_STORE_ALGORITHM_PROFILES,KEY_PATH_PROFILES, + LEGACY_KEY_PASSWORD_PROFILES, KEY_PASSWORD_PROFILES,CERT_PROFILES,CAPATH_SETTING_PROFILES, + CLIENT_AUTH_SETTING_PROFILES, VERIFICATION_MODE_SETTING_PROFILES); + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java index dd115946973..99cc36d1d02 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.common.socket.SocketAccess; import org.elasticsearch.xpack.security.Security; @@ -852,7 +851,7 @@ public class SSLService extends AbstractComponent { private static List getTransportProfileSSLSettings(Settings settings) { List sslSettings = new ArrayList<>(); - Map profiles = 
TcpTransport.TRANSPORT_PROFILES_SETTING.get(settings).getAsGroups(true); + Map profiles = settings.getGroups("transport.profiles.", true); for (Entry entry : profiles.entrySet()) { Settings profileSettings = entry.getValue().getByPrefix("xpack.security.ssl."); if (profileSettings.isEmpty() == false) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java index 29ce0071375..b2c08846530 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java @@ -5,6 +5,10 @@ */ package org.elasticsearch.xpack.security.transport; +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; @@ -17,7 +21,14 @@ import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.discovery.TestZenDiscovery; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackPlugin; import 
org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.Security; @@ -29,10 +40,13 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.concurrent.CountDownLatch; import static java.util.Collections.singletonMap; import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase { @@ -65,7 +79,6 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase settingsBuilder.put(super.nodeSettings(nodeOrdinal)) .put("transport.profiles.client.xpack.security.ssl.truststore.path", store) // settings for client truststore .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("transport.profiles.default.type", "node") .put("transport.profiles.client.xpack.security.type", "client") .put("transport.profiles.client.port", randomClientPortRange) // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent @@ -73,6 +86,9 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase .put("xpack.security.audit.enabled", false) .put(XPackSettings.WATCHER_ENABLED.getKey(), false) .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); + if (randomBoolean()) { + settingsBuilder.put("transport.profiles.default.xpack.security.type", "node"); // this is default lets set it randomly + } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> secureSettings.setString("transport.profiles.client.xpack.security.ssl.truststore.secure_password", "testnode")); @@ -111,7 +127,7 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase } } - public void 
testThatConnectionToClientTypeConnectionIsRejected() throws IOException, NodeValidationException { + public void testThatConnectionToClientTypeConnectionIsRejected() throws IOException, NodeValidationException, InterruptedException { Path home = createTempDir(); Path xpackConf = home.resolve("config").resolve(XPackPlugin.NAME); Files.createDirectories(xpackConf); @@ -144,23 +160,49 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); try (Node node = new MockNode(nodeSettings.build(), Arrays.asList(XPackPlugin.class, TestZenDiscovery.TestPlugin.class))) { node.start(); + TransportService instance = node.injector().getInstance(TransportService.class); + try (Transport.Connection connection = instance.openConnection(new DiscoveryNode("theNode", transportAddress, Version.CURRENT), + ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null))) { + // handshake should be ok + final DiscoveryNode handshake = instance.handshake(connection, 10000); + assertEquals(transport.boundAddress().publishAddress(), handshake.getAddress()); + CountDownLatch latch = new CountDownLatch(1); + instance.sendRequest(connection, NodeMappingRefreshAction.ACTION_NAME, + new NodeMappingRefreshAction.NodeMappingRefreshRequest("foo", "bar", "baz"), + TransportRequestOptions.EMPTY, + new TransportResponseHandler() { + @Override + public TransportResponse newInstance() { + fail("never get that far"); + return null; + } - // assert that node is not connected by waiting for the timeout - try { - // updating cluster settings requires a master. since the node should not be able to - // connect to the cluster, there should be no master, and therefore this - // operation should fail. 
we can't use cluster health/stats here to and - // wait for a timeout, because as long as the node is not connected to the cluster - // the license is disabled and therefore blocking health & stats calls. - node.client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(singletonMap("logger.org.elasticsearch.xpack.security", "DEBUG")) - .setMasterNodeTimeout(TimeValue.timeValueMillis(100)) - .get(); - fail("Expected to fail update settings as the node should not be able to connect to the cluster, cause there should be " + - "no master"); - } catch (MasterNotDiscoveredException e) { - // expected - logger.error("expected exception", e); + @Override + public void handleResponse(TransportResponse response) { + try { + fail("never get that far"); + } finally { + latch.countDown(); + } + } + + @Override + public void handleException(TransportException exp) { + try { + assertThat(exp.getCause(), instanceOf(ElasticsearchSecurityException.class)); + assertThat(exp.getCause().getMessage(), + equalTo("executing internal/shard actions is considered malicious and forbidden")); + } finally { + latch.countDown(); + } + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); + latch.await(); } } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/filter/IPFilterTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/filter/IPFilterTests.java index b5310f67c58..6c78fdc17dd 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/filter/IPFilterTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/filter/IPFilterTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.Network; -import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import 
org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.security.audit.AuditTrailService; @@ -62,7 +61,8 @@ public class IPFilterTests extends ESTestCase { IPFilter.IP_FILTER_ENABLED_SETTING, IPFilter.TRANSPORT_FILTER_ALLOW_SETTING, IPFilter.TRANSPORT_FILTER_DENY_SETTING, - TcpTransport.TRANSPORT_PROFILES_SETTING))); + IPFilter.PROFILE_FILTER_ALLOW_SETTING, + IPFilter.PROFILE_FILTER_DENY_SETTING))); httpTransport = mock(HttpServerTransport.class); TransportAddress httpAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 9200); @@ -145,6 +145,27 @@ public class IPFilterTests extends ESTestCase { assertAddressIsDeniedForProfile("client", "192.168.0.2"); } + public void testThatProfilesAreUpdateable() throws Exception { + Settings settings = Settings.builder() + .put("xpack.security.transport.filter.allow", "localhost") + .put("xpack.security.transport.filter.deny", "_all") + .put("transport.profiles.client.xpack.security.filter.allow", "192.168.0.1") + .put("transport.profiles.client.xpack.security.filter.deny", "_all") + .build(); + ipFilter = new IPFilter(settings, auditTrail, clusterSettings, licenseState); + ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses()); + Settings newSettings = Settings.builder().putArray("transport.profiles.client.xpack.security.filter.allow", "192.168.0.1", + "192.168.0.2") + .put("transport.profiles.client.xpack.security.filter.deny", "192.168.0.3").build(); + Settings.Builder updatedSettingsBuilder = Settings.builder(); + clusterSettings.updateDynamicSettings(newSettings, updatedSettingsBuilder, Settings.builder(), "test"); + clusterSettings.applySettings(updatedSettingsBuilder.build()); + assertAddressIsAllowed("127.0.0.1"); + assertAddressIsDenied("192.168.0.1"); + assertAddressIsAllowedForProfile("client", "192.168.0.1", "192.168.0.2"); + assertAddressIsDeniedForProfile("client", "192.168.0.3"); + } + public void testThatAllowWinsOverDeny() throws Exception { 
Settings settings = Settings.builder() .put("xpack.security.transport.filter.allow", "10.0.0.1") diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java index fa08ee0a7b1..984a2939b1f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IpFilterRemoteAddressFilterTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; +import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.junit.Before; @@ -54,7 +55,8 @@ public class IpFilterRemoteAddressFilterTests extends ESTestCase { IPFilter.IP_FILTER_ENABLED_SETTING, IPFilter.TRANSPORT_FILTER_ALLOW_SETTING, IPFilter.TRANSPORT_FILTER_DENY_SETTING, - TcpTransport.TRANSPORT_PROFILES_SETTING))); + IPFilter.PROFILE_FILTER_ALLOW_SETTING, + IPFilter.PROFILE_FILTER_DENY_SETTING))); XPackLicenseState licenseState = mock(XPackLicenseState.class); when(licenseState.isIpFilteringAllowed()).thenReturn(true); AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportTests.java index c745af5fe04..7b7fe359084 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportTests.java +++ 
b/plugin/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4TransportTests.java @@ -73,7 +73,7 @@ public class SecurityNetty4TransportTests extends ESTestCase { public void testThatProfileTakesDefaultSSLSetting() throws Exception { SecurityNetty4Transport transport = createTransport(); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY); + ChannelHandler handler = transport.getServerChannelInitializer("default"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine(), notNullValue()); } @@ -81,7 +81,7 @@ public class SecurityNetty4TransportTests extends ESTestCase { public void testDefaultClientAuth() throws Exception { SecurityNetty4Transport transport = createTransport(); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY); + ChannelHandler handler = transport.getServerChannelInitializer("default"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false)); @@ -96,7 +96,7 @@ public class SecurityNetty4TransportTests extends ESTestCase { sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY); + ChannelHandler handler = transport.getServerChannelInitializer("default"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false)); @@ -111,7 +111,7 @@ public 
class SecurityNetty4TransportTests extends ESTestCase { sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY); + ChannelHandler handler = transport.getServerChannelInitializer("default"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false)); @@ -126,7 +126,7 @@ public class SecurityNetty4TransportTests extends ESTestCase { sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY); + ChannelHandler handler = transport.getServerChannelInitializer("default"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(true)); @@ -136,13 +136,13 @@ public class SecurityNetty4TransportTests extends ESTestCase { String value = randomFrom(SSLClientAuth.REQUIRED.name(), SSLClientAuth.REQUIRED.name().toLowerCase(Locale.ROOT)); Settings settings = Settings.builder() .put(env.settings()) + .put("transport.profiles.client.port", "8000-9000") .put("transport.profiles.client.xpack.security.ssl.client_authentication", value) .build(); sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("client", - Settings.builder().put("xpack.security.ssl.client_authentication", 
value).build()); + ChannelHandler handler = transport.getServerChannelInitializer("client"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false)); @@ -152,13 +152,13 @@ public class SecurityNetty4TransportTests extends ESTestCase { String value = randomFrom(SSLClientAuth.NONE.name(), SSLClientAuth.NONE.name().toLowerCase(Locale.ROOT)); Settings settings = Settings.builder() .put(env.settings()) + .put("transport.profiles.client.port", "8000-9000") .put("transport.profiles.client.xpack.security.ssl.client_authentication", value) .build(); sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - ChannelHandler handler = transport.getServerChannelInitializer("client", - Settings.builder().put("xpack.security.ssl.client_authentication", value).build()); + ChannelHandler handler = transport.getServerChannelInitializer("client"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false)); @@ -168,13 +168,13 @@ public class SecurityNetty4TransportTests extends ESTestCase { String value = randomFrom(SSLClientAuth.OPTIONAL.name(), SSLClientAuth.OPTIONAL.name().toLowerCase(Locale.ROOT)); Settings settings = Settings.builder() .put(env.settings()) + .put("transport.profiles.client.port", "8000-9000") .put("transport.profiles.client.xpack.security.ssl.client_authentication", value) .build(); sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - final ChannelHandler handler = 
transport.getServerChannelInitializer("client", - Settings.builder().put("xpack.security.ssl.client_authentication", value).build()); + final ChannelHandler handler = transport.getServerChannelInitializer("client"); final EmbeddedChannel ch = new EmbeddedChannel(handler); assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false)); assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(true)); @@ -197,7 +197,7 @@ public class SecurityNetty4TransportTests extends ESTestCase { sslService = new SSLService(settings, env); SecurityNetty4Transport transport = createTransport(settings); Netty4MockUtil.setOpenChannelsHandlerToMock(transport); - final ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY); + final ChannelHandler handler = transport.getServerChannelInitializer("default"); final EmbeddedChannel ch = new EmbeddedChannel(handler); final SSLEngine engine = ch.pipeline().get(SslHandler.class).engine(); assertFalse(engine.getNeedClientAuth());