Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-03-02 17:09:18 +00:00)

Merge branch 'master' into feature/sql

Original commit: elastic/x-pack-elasticsearch@ed7a977328
This commit is contained in: commit 184d53f598
@@ -82,7 +82,10 @@ Continue with installation? [y/N]y
----------------------------------------------------------
--

-. If you have disabled automatic index creation in {es}, configure
+. {xpack} will try to automatically create a number of indices within {es}.
+By default, {es} is configured to allow automatic index creation, and no
+additional steps are required. However, if you have disabled automatic index
+creation in {es}, you must configure
{ref}/docs-index_.html#index-creation[`action.auto_create_index`] in
`elasticsearch.yml` to allow {xpack} to create the following indices:
+
@@ -92,6 +95,16 @@ Continue with installation? [y/N]y
action.auto_create_index: .security,.monitoring*,.watches,.triggered_watches,.watcher-history*,.ml*
-----------------------------------------------------------
--
+
[IMPORTANT]
=============================================================================
If you are using https://www.elastic.co/products/logstash[Logstash]
or https://www.elastic.co/products/beats[Beats] then you will most likely
require additional index names in your `action.auto_create_index` setting, and
the exact value will depend on your local configuration. If you are unsure of
the correct value for your environment, you may consider setting the value to
`*`, which will allow automatic creation of all indices.
=============================================================================

. Start {es}.
+
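As a concrete illustration of the IMPORTANT note above, a deployment that also ingests Logstash and Beats data might extend the setting along these lines (the `logstash-*` and `filebeat-*` patterns are assumptions; match them to your local index naming):

[source,yaml]
--------------------------------------------------
action.auto_create_index: .security,.monitoring*,.watches,.triggered_watches,.watcher-history*,.ml*,logstash-*,filebeat-*
--------------------------------------------------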
@@ -92,7 +92,7 @@ POST _xpack/ml/anomaly_detectors/it_ops_new_logs/_update
      "enabled": true
    },
    "analysis_limits": {
-      "model_memory_limit": 1024
+      "model_memory_limit": "1024mb"
    },
    "renormalization_window_days": 30,
    "background_persist_interval": "2h",
@@ -135,7 +135,7 @@ information, including the updated property values. For example:
    "influencers": []
  },
  "analysis_limits": {
-    "model_memory_limit": 1024
+    "model_memory_limit": "1024mb"
  },
  "data_description": {
    "time_field": "time",
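Distilled from the two hunks above, the updated call now passes the memory limit as a byte-size string rather than a bare number of megabytes (job name and field as in the surrounding documentation example):

[source,js]
--------------------------------------------------
POST _xpack/ml/anomaly_detectors/it_ops_new_logs/_update
{
  "analysis_limits": {
    "model_memory_limit": "1024mb"
  }
}
--------------------------------------------------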
@@ -65,11 +65,40 @@ xpack:
          username_pattern: "EMAILADDRESS=(.*?)(?:,|$)"
------------------------------------------------------------
+
You can also specify which truststore to use for authentication. This is useful
when the SSL/TLS layer trusts clients with certificates that are signed by a
different CA than the one that signs your users' certificates. To specify the
location of the truststore, specify the `truststore.path` option:
+
. Restart Elasticsearch.
[[pki-ssl-config]]
==== PKI and SSL Settings

The PKI realm relies on the SSL settings of the node's network interface
(transport or http). The realm can be configured to be more restrictive than
the underlying network connection - that is, it is possible to configure the
node such that some connections are accepted by the network interface but then
fail to be authenticated by the PKI realm. However, the reverse is not possible
- the PKI realm cannot authenticate a connection that has been refused by the
network interface.

In particular this means:

* The transport or http interface must request client certificates by setting
  `client_authentication` to `optional` or `required`.
* The interface must _trust_ the certificate that is presented by the client
  by configuring either the `truststore` or `certificate_authorities` paths,
  or by setting `verification_mode` to `none`.
* The _protocols_ supported by the interface must be compatible with those
  used by the client.

The relevant network interface (transport or http) must be configured to trust
any certificate that is to be used within the PKI realm. However, it is possible
to configure the PKI realm to trust only a _subset_ of the certificates accepted
by the network interface.
This is useful when the SSL/TLS layer trusts clients with certificates that are
signed by a different CA than the one that signs your users' certificates.

To configure the PKI realm with its own truststore, specify the
`truststore.path` option as below:

[source, yaml]
------------------------------------------------------------
xpack:
@@ -83,35 +112,41 @@ xpack:
              password: "x-pack-test-password"
------------------------------------------------------------

. Restart Elasticsearch.
The `certificate_authorities` option may be used as an alternative to the
`truststore.path` setting.

[[pki-settings]]
===== PKI Realm Settings

[cols="4,^3,10"]
|=======================
| Setting                 | Required | Description
| `type`                  | yes      | Indicates the realm type. Must be set to `pki`.
| `order`                 | no       | Indicates the priority of this realm within the realm
                                       chain. Realms with a lower order are consulted first.
                                       Although not required, we recommend explicitly
                                       setting this value when you configure multiple realms.
                                       Defaults to `Integer.MAX_VALUE`.
| `enabled`               | no       | Indicates whether this realm is enabled or disabled.
                                       Enables you to disable a realm without removing its
                                       configuration. Defaults to `true`.
| `username_pattern`      | no       | Specifies the regular expression pattern used to extract
                                       the username from the certificate DN. The first match
                                       group is used as the username. Defaults to `CN=(.*?)(?:,\|$)`.
| `truststore.path`       | no       | The path to the truststore. Defaults to the path
                                       defined by {ref}/security-settings.html#ssl-tls-settings[SSL/TLS settings].
| `truststore.password`   | no/yes   | Specifies the password for the truststore. Must be
                                       provided if `truststore.path` is set.
| `truststore.algorithm`  | no       | Specifies the algorithm used for the truststore.
                                       Defaults to `SunX509`.
| `files.role_mapping`    | no       | Specifies the <<security-files-location,location>>
                                       for the <<pki-role-mapping, YAML role mapping configuration file>>.
                                       Defaults to `CONFIG_DIR/x-pack/role_mapping.yml`.
| Setting                    | Required | Description
| `type`                     | yes      | Indicates the realm type. Must be set to `pki`.
| `order`                    | no       | Indicates the priority of this realm within the realm
                                          chain. Realms with a lower order are consulted first.
                                          Although not required, we recommend explicitly
                                          setting this value when you configure multiple realms.
                                          Defaults to `Integer.MAX_VALUE`.
| `enabled`                  | no       | Indicates whether this realm is enabled or disabled.
                                          Enables you to disable a realm without removing its
                                          configuration. Defaults to `true`.
| `username_pattern`         | no       | Specifies the regular expression pattern used to extract
                                          the username from the certificate DN. The first match
                                          group is used as the username. Defaults to `CN=(.*?)(?:,\|$)`.
| `certificate_authorities`  | no       | List of paths to the PEM encoded certificate files
                                          that should be trusted.
                                          This setting may not be used with `truststore.path`.
| `truststore.path`          | no       | The path to the truststore. Defaults to the path
                                          defined by {ref}/security-settings.html#ssl-tls-settings[SSL/TLS settings].
                                          This setting may not be used with `certificate_authorities`.
| `truststore.password`      | no/yes   | Specifies the password for the truststore. Must be
                                          provided if `truststore.path` is set.
| `truststore.algorithm`     | no       | Specifies the algorithm used for the truststore.
                                          Defaults to `SunX509`.
| `files.role_mapping`       | no       | Specifies the <<security-files-location,location>>
                                          for the <<pki-role-mapping, YAML role mapping configuration file>>.
                                          Defaults to `CONFIG_DIR/x-pack/role_mapping.yml`.
|=======================

[[assigning-roles-pki]]
@@ -151,4 +186,16 @@ user: <1>
<1> The name of a role.
<2> The distinguished name (DN) of a PKI user.

The distinguished name for a PKI user follows X.500 naming conventions, which
place the most specific fields (like `cn` or `uid`) at the beginning of the
name, and the most general fields (like `o` or `dc`) at the end of the name.
Some tools, such as _openssl_, may print out the subject name in a different
format.

One way that you can determine the correct DN for a certificate is to use the
{ref}/security-api-authenticate.html[authenticate API] (use the relevant PKI
certificate as the means of authentication) and inspect the metadata field in
the result. The user's distinguished name will be populated under the `pki_dn`
key. You can also use the authenticate API to validate your role mapping.

For more information, see <<mapping-roles, Mapping Users and Groups to Roles>>.
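A minimal sketch of the approach described above (the endpoint is the documented authenticate API; the response field names are as described in the paragraph):

[source,js]
--------------------------------------------------
GET _xpack/security/_authenticate
--------------------------------------------------

When the request is authenticated with the PKI certificate in question, the response's `metadata` object carries the DN under the `pki_dn` key, ready to paste into `role_mapping.yml`.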
@@ -258,6 +258,12 @@ are values. The mappings can have a many-to-many relationship. When you map roles
to groups, the roles of a user in that group are the combination of the roles
assigned to that group and the roles assigned to that user.

By default, {security} checks role mapping files for changes every 5 seconds.
You can change this default behavior by changing the
`resource.reload.interval.high` setting in the `elasticsearch.yml` file
(as this is a common setting in Elasticsearch, changing its value may affect
other schedules in the system).
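For example, to check for changes every 30 seconds instead (the interval value is chosen purely for illustration; note the caveat above that this setting is shared with other reload schedules):

[source,yaml]
--------------------------------------------------
resource.reload.interval.high: 30s
--------------------------------------------------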
==== Realm Specific Details
[float]
[[ldap-role-mapping]]
@@ -8,8 +8,10 @@ users, you can use the _run as_ mechanism to restrict data access according to

To "run as" (impersonate) another user, you must be able to retrieve the user from
the realm you use to authenticate. Both the internal `native` and `file` realms
-support this out of the box. The LDAP realm however must be configured to enable
-user search. For more information, see <<ldap-user-search, Configuring an LDAP Realm with User Search>>.
+support this out of the box. The LDAP realm however must be configured to run in
+_user search_ mode. For more information, see
+<<ldap-user-search, Configuring an LDAP Realm with User Search>>.
The Active Directory and PKI realms do not support "run as".

To submit requests on behalf of other users, you need to have the `run_as`
permission. For example, the following role grants permission to submit request
@@ -13,7 +13,7 @@ to `elasticsearch.yml`:
--------------------------------------------------
transport.profiles.client: <1>
    port: 9500-9600 <2>
-    shield:
+    xpack.security:
        type: client <3>
--------------------------------------------------
<1> `client` is the name of this example profile
@@ -73,22 +73,27 @@ plugins in your Logstash `.conf` file. For example:
+
[source,js]
--------------------------------------------------
input {
input {
  elasticsearch {
    ...
    user => logstash_internal
    password => x-pack-test-password
  }
}
filter {
  elasticsearch {
    ...
    user => logstash_internal
    password => x-pack-test-password
  }
}
output {
  elasticsearch {
    ...
    user => logstash_internal
    password => x-pack-test-password
  }
}
--------------------------------------------------

[float]
@@ -494,10 +494,12 @@ Defaults to `CN=(.*?)(?:,\|$)`

`certificate_authorities`::
List of PEM certificate files that should be used to authenticate a
user's certificate as trusted. Defaults to the trusted certificates configured for SSL.
See the {xpack-ref}/pki-realm.html#pki-ssl-config[SSL settings] section of the PKI realm documentation for more information.
This setting may not be used with `truststore.path`.

`truststore.path`::
The path of a truststore to use. Defaults to the trusted certificates configured for SSL.
See the {xpack-ref}/pki-realm.html#pki-ssl-config[SSL settings] section of the PKI realm documentation for more information.
This setting may not be used with `certificate_authorities`.

`truststore.password`::
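A hedged sketch of the `certificate_authorities` alternative (the realm name `pki1` and the certificate path are placeholders; only one of the two mutually exclusive settings may be present):

[source,yaml]
------------------------------------------------------------
xpack:
  security:
    authc:
      realms:
        pki1:
          type: pki
          certificate_authorities: [ "/path/to/users_ca.crt" ]
------------------------------------------------------------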
@@ -1 +0,0 @@
-84da342824017dcbeefda0becfef11ce2b5836da

plugin/licenses/netty-buffer-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+0e3f583ea8a2618a7563b1ee2aa696c23edcc3d8

@@ -1 +0,0 @@
-d9ffe2192b567a4df052f6a36e7b7090b510e0cf

plugin/licenses/netty-codec-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+370eeb6e9d92495a2a3be096ab6102755af76730

@@ -1 +0,0 @@
-3edeb0f08e455e570a55eb56bf64595fcb1a6b15

plugin/licenses/netty-codec-http-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+0ee87368766e6b900cf6be8ac9cdce27156e9411

@@ -1 +0,0 @@
-f79a702bc5f275832ae18e33ba3d2a264a4aa728

plugin/licenses/netty-common-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+f640e8cd8866527150784f8986152d3bba45b712

@@ -1 +0,0 @@
-6f43aae489b2e4fd7446cd347b077bb058a225d8

plugin/licenses/netty-handler-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+85847aa81a98d29948731befb4784d141046fa0e

@@ -1 +0,0 @@
-3310d435f97ef9769dd5659dae3ef762ee3f0f57

plugin/licenses/netty-resolver-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+d33ce420bd22c8a53246296ceb6e1ff08d31f8e1

@@ -1 +0,0 @@
-6244fb27cbc24a8d006e9aaaead6b25dcf3aa2e1

plugin/licenses/netty-transport-4.1.13.Final.jar.sha1 (new file)
@@ -0,0 +1 @@
+5008406221a849a350ad2a8885f14ac330e038f3
@@ -233,7 +233,7 @@ public class MachineLearning implements ActionPlugin {
        return Arrays.asList(
                // Custom metadata
                new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField("ml"),
-                       parser -> MlMetadata.ML_METADATA_PARSER.parse(parser, null).build()),
+                       parser -> MlMetadata.METADATA_PARSER.parse(parser, null).build()),
                new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(PersistentTasksCustomMetaData.TYPE),
                        PersistentTasksCustomMetaData::fromXContent),
@@ -53,11 +53,13 @@ public class MlMetadata implements MetaData.Custom {

    public static final String TYPE = "ml";
    public static final MlMetadata EMPTY_METADATA = new MlMetadata(Collections.emptySortedMap(), Collections.emptySortedMap());
-    public static final ObjectParser<Builder, Void> ML_METADATA_PARSER = new ObjectParser<>("ml_metadata", Builder::new);
+    // This parser follows the pattern that metadata is parsed leniently (to allow for enhancements)
+    public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("ml_metadata", true, Builder::new);

    static {
-        ML_METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.PARSER.apply(p, c).build(), JOBS_FIELD);
-        ML_METADATA_PARSER.declareObjectArray(Builder::putDatafeeds, (p, c) -> DatafeedConfig.PARSER.apply(p, c).build(), DATAFEEDS_FIELD);
+        METADATA_PARSER.declareObjectArray(Builder::putJobs, (p, c) -> Job.METADATA_PARSER.apply(p, c).build(), JOBS_FIELD);
+        METADATA_PARSER.declareObjectArray(Builder::putDatafeeds,
+                (p, c) -> DatafeedConfig.METADATA_PARSER.apply(p, c).build(), DATAFEEDS_FIELD);
    }

    private final SortedMap<String, Job> jobs;
@@ -0,0 +1,19 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml;

/**
 * In order to allow enhancements that require additions to the ML custom cluster state to be made in minor versions,
 * when we parse our metadata from persisted cluster state we ignore unknown fields. However, we don't want to be
 * lenient when parsing config as this would mean user mistakes could go undetected. Therefore, for all JSON objects
 * that are used in both custom cluster state and config we have two parsers, one tolerant of unknown fields (for
 * parsing cluster state) and one strict (for parsing config). This class enumerates the two options.
 */
public enum MlParserType {

    METADATA, CONFIG;

}
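To make the enum's intent concrete, here is a minimal caller-side sketch (not part of this commit; `Job.PARSERS` and `MlParserType` are names introduced in this diff, package paths as in the x-pack repository, and obtaining the `XContentParser` is left to the caller):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.config.Job;

class ParserSelectionSketch {
    // Parse leniently when reading persisted cluster state (unknown fields
    // are ignored), strictly when handling user-supplied config (unknown
    // fields are an error).
    static Job.Builder parseJob(XContentParser parser, boolean fromClusterState) {
        MlParserType type = fromClusterState ? MlParserType.METADATA : MlParserType.CONFIG;
        return Job.PARSERS.get(type).apply(parser, null);
    }
}
--------------------------------------------------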
@@ -74,7 +74,7 @@ public class PutDatafeedAction extends Action<PutDatafeedAction.Request, PutData
    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

        public static Request parseRequest(String datafeedId, XContentParser parser) {
-            DatafeedConfig.Builder datafeed = DatafeedConfig.PARSER.apply(parser, null);
+            DatafeedConfig.Builder datafeed = DatafeedConfig.CONFIG_PARSER.apply(parser, null);
            datafeed.setId(datafeedId);
            return new Request(datafeed.build());
        }
@@ -64,7 +64,7 @@ public class PutJobAction extends Action<PutJobAction.Request, PutJobAction.Resp
    public static class Request extends AcknowledgedRequest<Request> implements ToXContent {

        public static Request parseRequest(String jobId, XContentParser parser) {
-            Job.Builder jobBuilder = Job.PARSER.apply(parser, null);
+            Job.Builder jobBuilder = Job.CONFIG_PARSER.apply(parser, null);
            if (jobBuilder.getId() == null) {
                jobBuilder.setId(jobId);
            } else if (!Strings.isNullOrEmpty(jobId) && !jobId.equals(jobBuilder.getId())) {
@@ -62,7 +62,7 @@ extends Action<ValidateDetectorAction.Request, ValidateDetectorAction.Response,
        private Detector detector;

        public static Request parseRequest(XContentParser parser) {
-            Detector detector = Detector.PARSER.apply(parser, null).build();
+            Detector detector = Detector.CONFIG_PARSER.apply(parser, null).build();
            return new Request(detector);
        }

@@ -63,7 +63,7 @@ extends Action<ValidateJobConfigAction.Request, ValidateJobConfigAction.Response
        private Job job;

        public static Request parseRequest(XContentParser parser) {
-            Job.Builder job = Job.PARSER.apply(parser, null);
+            Job.Builder job = Job.CONFIG_PARSER.apply(parser, null);
            // When jobs are PUT their ID must be supplied in the URL - assume this will
            // be valid unless an invalid job ID is specified in the JSON to be validated
            job.setId(job.getId() != null ? job.getId() : "ok");
@@ -16,10 +16,13 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
+import java.util.EnumMap;
import java.util.Locale;
+import java.util.Map;
import java.util.Objects;

/**
@@ -30,22 +33,33 @@ public class ChunkingConfig implements ToXContentObject, Writeable {
    public static final ParseField MODE_FIELD = new ParseField("mode");
    public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span");

-    public static final ConstructingObjectParser<ChunkingConfig, Void> PARSER = new ConstructingObjectParser<>(
-            "chunking_config", a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]));
+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
+    public static final ConstructingObjectParser<ChunkingConfig, Void> METADATA_PARSER = new ConstructingObjectParser<>(
+            "chunking_config", true, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]));
+    public static final ConstructingObjectParser<ChunkingConfig, Void> CONFIG_PARSER = new ConstructingObjectParser<>(
+            "chunking_config", false, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]));
+    public static final Map<MlParserType, ConstructingObjectParser<ChunkingConfig, Void>> PARSERS =
+            new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
-            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                return Mode.fromString(p.text());
-            }
-            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
-        }, MODE_FIELD, ValueType.STRING);
-        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> {
-            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName());
-            }
-            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
-        }, TIME_SPAN_FIELD, ValueType.STRING);
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ConstructingObjectParser<ChunkingConfig, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return Mode.fromString(p.text());
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, MODE_FIELD, ValueType.STRING);
+            parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName());
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, TIME_SPAN_FIELD, ValueType.STRING);
+        }
    }

    private final Mode mode;
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.config.Job;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
@@ -37,7 +38,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
+import java.util.EnumMap;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

@@ -74,35 +77,42 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
    public static final ParseField SOURCE = new ParseField("_source");
    public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config");

-    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("datafeed_config", Builder::new);
+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
+    public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("datafeed_config", true, Builder::new);
+    public static final ObjectParser<Builder, Void> CONFIG_PARSER = new ObjectParser<>("datafeed_config", false, Builder::new);
+    public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareString(Builder::setId, ID);
-        PARSER.declareString(Builder::setJobId, Job.ID);
-        PARSER.declareStringArray(Builder::setIndices, INDEXES);
-        PARSER.declareStringArray(Builder::setIndices, INDICES);
-        PARSER.declareStringArray(Builder::setTypes, TYPES);
-        PARSER.declareString((builder, val) ->
-                builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
-        PARSER.declareString((builder, val) ->
-                builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY);
-        PARSER.declareObject(Builder::setQuery,
-                (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY);
-        PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p),
-                AGGREGATIONS);
-        PARSER.declareObject(Builder::setAggregations,(p, c) -> AggregatorFactories.parseAggregators(p), AGGS);
-        PARSER.declareObject(Builder::setScriptFields, (p, c) -> {
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareString(Builder::setId, ID);
+            parser.declareString(Builder::setJobId, Job.ID);
+            parser.declareStringArray(Builder::setIndices, INDEXES);
+            parser.declareStringArray(Builder::setIndices, INDICES);
+            parser.declareStringArray(Builder::setTypes, TYPES);
+            parser.declareString((builder, val) ->
+                    builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
+            parser.declareString((builder, val) ->
+                    builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY);
+            parser.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY);
+            parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS);
+            parser.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGS);
+            parser.declareObject(Builder::setScriptFields, (p, c) -> {
                List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>();
                while (p.nextToken() != XContentParser.Token.END_OBJECT) {
                    parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p));
                }
-            parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
-            return parsedScriptFields;
-        }, SCRIPT_FIELDS);
-        PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE);
-        // TODO this is to read former _source field. Remove in v7.0.0
-        PARSER.declareBoolean((builder, value) -> {}, SOURCE);
-        PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG);
-    }
+                parsedScriptFields.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
+                return parsedScriptFields;
+            }, SCRIPT_FIELDS);
+            parser.declareInt(Builder::setScrollSize, SCROLL_SIZE);
+            // TODO this is to read former _source field. Remove in v7.0.0
+            parser.declareBoolean((builder, value) -> {}, SOURCE);
+            parser.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSERS.get(parserType), CHUNKING_CONFIG);
+        }
+    }

    private final String id;
@@ -63,7 +63,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
            return parsedScriptFields;
        }, DatafeedConfig.SCRIPT_FIELDS);
        PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE);
-        PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG);
+        PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.CONFIG_PARSER, DatafeedConfig.CHUNKING_CONFIG);
    }

    private final String id;
@@ -14,6 +14,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.time.TimeUtils;
@@ -21,8 +22,10 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.EnumMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.SortedSet;
@@ -70,27 +73,42 @@ public class AnalysisConfig implements ToXContentObject, Writeable {

    public static final long DEFAULT_RESULT_FINALIZATION_WINDOW = 2L;

+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<AnalysisConfig.Builder, Void> PARSER =
-            new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), a -> new AnalysisConfig.Builder((List<Detector>) a[0]));
+    public static final ConstructingObjectParser<AnalysisConfig.Builder, Void> METADATA_PARSER =
+            new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), true,
+                    a -> new AnalysisConfig.Builder((List<Detector>) a[0]));
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<AnalysisConfig.Builder, Void> CONFIG_PARSER =
+            new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), false,
+                    a -> new AnalysisConfig.Builder((List<Detector>) a[0]));
+    public static final Map<MlParserType, ConstructingObjectParser<Builder, Void>> PARSERS =
+            new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> Detector.PARSER.apply(p, c).build(), DETECTORS);
-        PARSER.declareString((builder, val) ->
-                builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN);
-        PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME);
-        PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS);
-        PARSER.declareString((builder, val) ->
-                builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY);
-        PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME);
-        PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS);
-        PARSER.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS);
-        PARSER.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW);
-        PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
-        PARSER.declareStringArray((builder, values) -> builder.setMultipleBucketSpans(
-                values.stream().map(v -> TimeValue.parseTimeValue(v, MULTIPLE_BUCKET_SPANS.getPreferredName()))
-                        .collect(Collectors.toList())), MULTIPLE_BUCKET_SPANS);
-        PARSER.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION);
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ConstructingObjectParser<AnalysisConfig.Builder, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareObjectArray(ConstructingObjectParser.constructorArg(),
+                    (p, c) -> Detector.PARSERS.get(parserType).apply(p, c).build(), DETECTORS);
+            parser.declareString((builder, val) ->
+                    builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN);
+            parser.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME);
+            parser.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS);
+            parser.declareString((builder, val) ->
+                    builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY);
+            parser.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME);
+            parser.declareStringArray(Builder::setInfluencers, INFLUENCERS);
+            parser.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS);
+            parser.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW);
+            parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
+            parser.declareStringArray((builder, values) -> builder.setMultipleBucketSpans(
+                    values.stream().map(v -> TimeValue.parseTimeValue(v, MULTIPLE_BUCKET_SPANS.getPreferredName()))
+                            .collect(Collectors.toList())), MULTIPLE_BUCKET_SPANS);
+            parser.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION);
+        }
    }

    /**
@@ -10,13 +10,19 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
+import java.util.EnumMap;
+import java.util.Map;
import java.util.Objects;

/**
@@ -31,17 +37,36 @@ public class AnalysisLimits implements ToXContentObject, Writeable {
    public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit");
    public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit");

-    public static final ConstructingObjectParser<AnalysisLimits, Void> PARSER = new ConstructingObjectParser<>(
-            "analysis_limits", a -> new AnalysisLimits((Long) a[0], (Long) a[1]));
+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
+    public static final ConstructingObjectParser<AnalysisLimits, Void> METADATA_PARSER = new ConstructingObjectParser<>(
+            "analysis_limits", true, a -> new AnalysisLimits((Long) a[0], (Long) a[1]));
+    public static final ConstructingObjectParser<AnalysisLimits, Void> CONFIG_PARSER = new ConstructingObjectParser<>(
+            "analysis_limits", false, a -> new AnalysisLimits((Long) a[0], (Long) a[1]));
+    public static final Map<MlParserType, ConstructingObjectParser<AnalysisLimits, Void>> PARSERS =
+            new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MODEL_MEMORY_LIMIT);
-        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT);
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ConstructingObjectParser<AnalysisLimits, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb();
+                } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
+                    return p.longValue();
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE);
+            parser.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT);
+        }
    }

    /**
     * The model memory limit in MiBs.
     * It is initialised to <code>null</code>.
-     * A value of <code>null</code> or <code>0</code> will result to the default being used.
+     * A value of <code>null</code> will result to the default being used.
     */
    private final Long modelMemoryLimit;

@@ -52,12 +77,16 @@ public class AnalysisLimits implements ToXContentObject, Writeable {
    private final Long categorizationExamplesLimit;

    public AnalysisLimits(Long modelMemoryLimit, Long categorizationExamplesLimit) {
-        this.modelMemoryLimit = modelMemoryLimit;
+        if (modelMemoryLimit != null && modelMemoryLimit < 1) {
+            String msg = Messages.getMessage(Messages.JOB_CONFIG_MODEL_MEMORY_LIMIT_TOO_LOW, modelMemoryLimit);
+            throw ExceptionsHelper.badRequestException(msg);
+        }
        if (categorizationExamplesLimit != null && categorizationExamplesLimit < 0) {
            String msg = Messages.getMessage(Messages.JOB_CONFIG_FIELD_VALUE_TOO_LOW, CATEGORIZATION_EXAMPLES_LIMIT, 0,
                    categorizationExamplesLimit);
            throw ExceptionsHelper.badRequestException(msg);
        }
+        this.modelMemoryLimit = modelMemoryLimit;
        this.categorizationExamplesLimit = categorizationExamplesLimit;
    }

@@ -97,7 +126,7 @@ public class AnalysisLimits implements ToXContentObject, Writeable {
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (modelMemoryLimit != null) {
-            builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit);
+            builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit + "mb");
        }
        if (categorizationExamplesLimit != null) {
            builder.field(CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), categorizationExamplesLimit);
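The net effect of the AnalysisLimits hunks above: `model_memory_limit` is now accepted either as the legacy bare number of megabytes or as a byte-size string, and is always written back out as a string. A small sketch of strict, user-facing parsing (hypothetical helper; `AnalysisLimits.PARSERS` and `MlParserType` are names from this diff, package path as in the x-pack repository, and obtaining the `XContentParser` is left to the caller):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.config.AnalysisLimits;

class AnalysisLimitsParsingSketch {
    // Both {"model_memory_limit": 1024} and {"model_memory_limit": "1024mb"}
    // now parse to the same 1024 MiB limit; unknown fields are rejected by
    // the strict CONFIG parser but tolerated by the METADATA parser.
    static AnalysisLimits parseStrict(XContentParser parser) {
        return AnalysisLimits.PARSERS.get(MlParserType.CONFIG).apply(parser, null);
    }
}
--------------------------------------------------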
@@ -14,13 +14,16 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.time.DateTimeFormatterTimestampConverter;

import java.io.IOException;
import java.time.ZoneOffset;
+import java.util.EnumMap;
import java.util.Locale;
+import java.util.Map;
import java.util.Objects;

/**
@@ -122,14 +125,25 @@ public class DataDescription implements ToXContentObject, Writeable {
    private final Character fieldDelimiter;
    private final Character quoteCharacter;

-    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), Builder::new);
+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
+    public static final ObjectParser<Builder, Void> METADATA_PARSER =
+            new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), true, Builder::new);
+    public static final ObjectParser<Builder, Void> CONFIG_PARSER =
+            new ObjectParser<>(DATA_DESCRIPTION_FIELD.getPreferredName(), false, Builder::new);
+    public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareString(Builder::setFormat, FORMAT_FIELD);
-        PARSER.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD);
-        PARSER.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD);
-        PARSER.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING);
-        PARSER.declareField(Builder::setQuoteCharacter, DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING);
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareString(Builder::setFormat, FORMAT_FIELD);
+            parser.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD);
+            parser.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD);
+            parser.declareField(Builder::setFieldDelimiter, DataDescription::extractChar, FIELD_DELIMITER_FIELD, ValueType.STRING);
+            parser.declareField(Builder::setQuoteCharacter, DataDescription::extractChar, QUOTE_CHARACTER_FIELD, ValueType.STRING);
+        }
    }

    public DataDescription(DataFormat dataFormat, String timeFieldName, String timeFormat, Character fieldDelimiter,
@@ -15,13 +15,16 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
+import java.util.EnumMap;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -35,25 +38,36 @@ public class DetectionRule implements ToXContentObject, Writeable {
    public static final ParseField CONDITIONS_CONNECTIVE_FIELD = new ParseField("conditions_connective");
    public static final ParseField RULE_CONDITIONS_FIELD = new ParseField("rule_conditions");

-    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), Builder::new);
+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
+    public static final ObjectParser<Builder, Void> METADATA_PARSER =
+            new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), true, Builder::new);
+    public static final ObjectParser<Builder, Void> CONFIG_PARSER =
+            new ObjectParser<>(DETECTION_RULE_FIELD.getPreferredName(), false, Builder::new);
+    public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareField(Builder::setRuleAction, p -> {
-            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                return RuleAction.fromString(p.text());
-            }
-            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
-        }, RULE_ACTION_FIELD, ValueType.STRING);
-        PARSER.declareString(Builder::setTargetFieldName, TARGET_FIELD_NAME_FIELD);
-        PARSER.declareString(Builder::setTargetFieldValue, TARGET_FIELD_VALUE_FIELD);
-        PARSER.declareField(Builder::setConditionsConnective, p -> {
-            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                return Connective.fromString(p.text());
-            }
-            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
-        }, CONDITIONS_CONNECTIVE_FIELD, ValueType.STRING);
-        PARSER.declareObjectArray(Builder::setRuleConditions,
-                (parser, parseFieldMatcher) -> RuleCondition.PARSER.apply(parser, parseFieldMatcher), RULE_CONDITIONS_FIELD);
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareField(Builder::setRuleAction, p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return RuleAction.fromString(p.text());
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, RULE_ACTION_FIELD, ValueType.STRING);
+            parser.declareString(Builder::setTargetFieldName, TARGET_FIELD_NAME_FIELD);
+            parser.declareString(Builder::setTargetFieldValue, TARGET_FIELD_VALUE_FIELD);
+            parser.declareField(Builder::setConditionsConnective, p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return Connective.fromString(p.text());
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, CONDITIONS_CONNECTIVE_FIELD, ValueType.STRING);
+            parser.declareObjectArray(Builder::setRuleConditions, (p, c) ->
+                    RuleCondition.PARSERS.get(parserType).apply(p, c), RULE_CONDITIONS_FIELD);
+        }
    }

    private final RuleAction ruleAction;
@@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.process.autodetect.writer.RecordWriter;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
@@ -25,9 +26,11 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.EnumMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -83,25 +86,34 @@ public class Detector implements ToXContentObject, Writeable {
    public static final ParseField DETECTOR_RULES_FIELD = new ParseField("detector_rules");
    public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");

-    public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("detector", Builder::new);
+    // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
+    public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("detector", true, Builder::new);
+    public static final ObjectParser<Builder, Void> CONFIG_PARSER = new ObjectParser<>("detector", false, Builder::new);
+    public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);

    static {
-        PARSER.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD);
-        PARSER.declareString(Builder::setFunction, FUNCTION_FIELD);
-        PARSER.declareString(Builder::setFieldName, FIELD_NAME_FIELD);
-        PARSER.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD);
-        PARSER.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD);
-        PARSER.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD);
-        PARSER.declareBoolean(Builder::setUseNull, USE_NULL_FIELD);
-        PARSER.declareField(Builder::setExcludeFrequent, p -> {
-            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
-                return ExcludeFrequent.forString(p.text());
-            }
-            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
-        }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING);
-        PARSER.declareObjectArray(Builder::setDetectorRules,
-                (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), DETECTOR_RULES_FIELD);
-        PARSER.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX);
+        PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
+        PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
+        for (MlParserType parserType : MlParserType.values()) {
+            ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
+            assert parser != null;
+            parser.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD);
+            parser.declareString(Builder::setFunction, FUNCTION_FIELD);
+            parser.declareString(Builder::setFieldName, FIELD_NAME_FIELD);
+            parser.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD);
+            parser.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD);
+            parser.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD);
+            parser.declareBoolean(Builder::setUseNull, USE_NULL_FIELD);
+            parser.declareField(Builder::setExcludeFrequent, p -> {
+                if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+                    return ExcludeFrequent.forString(p.text());
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, EXCLUDE_FREQUENT_FIELD, ObjectParser.ValueType.STRING);
+            parser.declareObjectArray(Builder::setDetectorRules, (p, c) ->
+                    DetectionRule.PARSERS.get(parserType).apply(p, c).build(), DETECTOR_RULES_FIELD);
+            parser.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX);
+        }
    }

    public static final String COUNT = "count";
@ -20,6 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.ToXContentObject;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.xpack.ml.MlParserType;
|
||||
import org.elasticsearch.xpack.ml.job.messages.Messages;
|
||||
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex;
|
||||
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
|
||||
@ -29,6 +30,7 @@ import org.elasticsearch.xpack.ml.utils.time.TimeUtils;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
@ -77,55 +79,65 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
|
||||
|
||||
public static final String ALL = "_all";
|
||||
|
||||
public static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("job_details", Builder::new);
|
||||
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
|
||||
public static final ObjectParser<Builder, Void> METADATA_PARSER = new ObjectParser<>("job_details", true, Builder::new);
|
||||
public static final ObjectParser<Builder, Void> CONFIG_PARSER = new ObjectParser<>("job_details", false, Builder::new);
|
||||
public static final Map<MlParserType, ObjectParser<Builder, Void>> PARSERS = new EnumMap<>(MlParserType.class);
|
||||
|
||||
public static final int MAX_JOB_ID_LENGTH = 64;
|
||||
public static final TimeValue MIN_BACKGROUND_PERSIST_INTERVAL = TimeValue.timeValueHours(1);

static {
    PARSER.declareString(Builder::setId, ID);
    PARSER.declareString(Builder::setJobType, JOB_TYPE);
    PARSER.declareString(Builder::setJobVersion, JOB_VERSION);
    PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION);
    PARSER.declareField(Builder::setCreateTime, p -> {
        if (p.currentToken() == Token.VALUE_NUMBER) {
            return new Date(p.longValue());
        } else if (p.currentToken() == Token.VALUE_STRING) {
            return new Date(TimeUtils.dateStringToEpoch(p.text()));
        }
        throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + CREATE_TIME.getPreferredName() + "]");
    }, CREATE_TIME, ValueType.VALUE);
    PARSER.declareField(Builder::setFinishedTime, p -> {
        if (p.currentToken() == Token.VALUE_NUMBER) {
            return new Date(p.longValue());
        } else if (p.currentToken() == Token.VALUE_STRING) {
            return new Date(TimeUtils.dateStringToEpoch(p.text()));
        }
        throw new IllegalArgumentException(
                "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]");
    }, FINISHED_TIME, ValueType.VALUE);
    PARSER.declareField(Builder::setLastDataTime, p -> {
        if (p.currentToken() == Token.VALUE_NUMBER) {
            return new Date(p.longValue());
        } else if (p.currentToken() == Token.VALUE_STRING) {
            return new Date(TimeUtils.dateStringToEpoch(p.text()));
        }
        throw new IllegalArgumentException(
                "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]");
    }, LAST_DATA_TIME, ValueType.VALUE);
    PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG);
    PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS);
    PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION);
    PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG);
    PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS);
    PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval(
            TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL);
    PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS);
    PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS);
    PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
    PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
    PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
    PARSER.declareBoolean(Builder::setDeleted, DELETED);
    PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
    PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
    for (MlParserType parserType : MlParserType.values()) {
        ObjectParser<Builder, Void> parser = PARSERS.get(parserType);
        assert parser != null;
        parser.declareString(Builder::setId, ID);
        parser.declareString(Builder::setJobType, JOB_TYPE);
        parser.declareString(Builder::setJobVersion, JOB_VERSION);
        parser.declareStringOrNull(Builder::setDescription, DESCRIPTION);
        parser.declareField(Builder::setCreateTime, p -> {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(TimeUtils.dateStringToEpoch(p.text()));
            }
            throw new IllegalArgumentException("unexpected token [" + p.currentToken() +
                    "] for [" + CREATE_TIME.getPreferredName() + "]");
        }, CREATE_TIME, ValueType.VALUE);
        parser.declareField(Builder::setFinishedTime, p -> {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(TimeUtils.dateStringToEpoch(p.text()));
            }
            throw new IllegalArgumentException(
                    "unexpected token [" + p.currentToken() + "] for [" + FINISHED_TIME.getPreferredName() + "]");
        }, FINISHED_TIME, ValueType.VALUE);
        parser.declareField(Builder::setLastDataTime, p -> {
            if (p.currentToken() == Token.VALUE_NUMBER) {
                return new Date(p.longValue());
            } else if (p.currentToken() == Token.VALUE_STRING) {
                return new Date(TimeUtils.dateStringToEpoch(p.text()));
            }
            throw new IllegalArgumentException(
                    "unexpected token [" + p.currentToken() + "] for [" + LAST_DATA_TIME.getPreferredName() + "]");
        }, LAST_DATA_TIME, ValueType.VALUE);
        parser.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSERS.get(parserType), ANALYSIS_CONFIG);
        parser.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSERS.get(parserType), ANALYSIS_LIMITS);
        parser.declareObject(Builder::setDataDescription, DataDescription.PARSERS.get(parserType), DATA_DESCRIPTION);
        parser.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSERS.get(parserType), MODEL_PLOT_CONFIG);
        parser.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS);
        parser.declareString((builder, val) -> builder.setBackgroundPersistInterval(
                TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL);
        parser.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS);
        parser.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS);
        parser.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT);
        parser.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID);
        parser.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME);
        parser.declareBoolean(Builder::setDeleted, DELETED);
    }
}

private final String jobId;
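The net effect of this hunk is that Job now keeps one ObjectParser per MlParserType in an EnumMap and declares every field inside a loop over MlParserType.values(), so the lenient and strict variants cannot drift apart. A minimal, self-contained sketch of that registration pattern (the FieldRegistry type below is an illustrative stand-in, not the Elasticsearch ObjectParser):

import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

public class ParserRegistrySketch {
    enum ParserType { METADATA, CONFIG }

    // Stand-in for ObjectParser: records the fields it has been taught.
    static final class FieldRegistry {
        final boolean lenient;
        final Map<String, Class<?>> declaredFields = new HashMap<>();
        FieldRegistry(boolean lenient) { this.lenient = lenient; }
        void declare(String field, Class<?> type) { declaredFields.put(field, type); }
    }

    static final FieldRegistry METADATA_PARSER = new FieldRegistry(true);  // lenient
    static final FieldRegistry CONFIG_PARSER = new FieldRegistry(false);   // strict
    static final Map<ParserType, FieldRegistry> PARSERS = new EnumMap<>(ParserType.class);

    static {
        PARSERS.put(ParserType.METADATA, METADATA_PARSER);
        PARSERS.put(ParserType.CONFIG, CONFIG_PARSER);
        // Each field is declared once, inside the loop, so the two variants
        // always know exactly the same field set.
        for (ParserType type : ParserType.values()) {
            FieldRegistry parser = PARSERS.get(type);
            assert parser != null;
            parser.declare("job_id", String.class);
            parser.declare("create_time", java.util.Date.class);
        }
    }
}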
@ -27,14 +27,14 @@ public class JobUpdate implements Writeable, ToXContentObject {
public static final ParseField DETECTORS = new ParseField("detectors");

public static final ConstructingObjectParser<Builder, Void> PARSER = new ConstructingObjectParser<>(
        "job_update", args -> new Builder((String) args[0]));
        "job_update", args -> new Builder((String) args[0]));

static {
    PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID);
    PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION);
    PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS);
    PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, Job.MODEL_PLOT_CONFIG);
    PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS);
    PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.CONFIG_PARSER, Job.MODEL_PLOT_CONFIG);
    PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.CONFIG_PARSER, Job.ANALYSIS_LIMITS);
    PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval(
            TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName())), Job.BACKGROUND_PERSIST_INTERVAL);
    PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS);

@ -326,7 +326,7 @@ public class JobUpdate implements Writeable, ToXContentObject {
    PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX);
    PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION);
    PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(),
            (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), RULES);
            (parser, parseFieldMatcher) -> DetectionRule.CONFIG_PARSER.apply(parser, parseFieldMatcher).build(), RULES);
}

private int detectorIndex;
@ -12,8 +12,11 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.ml.MlParserType;

import java.io.IOException;
import java.util.EnumMap;
import java.util.Map;
import java.util.Objects;

public class ModelPlotConfig implements ToXContentObject, Writeable {
@ -22,13 +25,25 @@ public class ModelPlotConfig implements ToXContentObject, Writeable {
private static final ParseField ENABLED_FIELD = new ParseField("enabled");
public static final ParseField TERMS_FIELD = new ParseField("terms");

public static final ConstructingObjectParser<ModelPlotConfig, Void> PARSER =
        new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(),
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
public static final ConstructingObjectParser<ModelPlotConfig, Void> METADATA_PARSER =
        new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), true,
                a -> new ModelPlotConfig((boolean) a[0], (String) a[1]));
public static final ConstructingObjectParser<ModelPlotConfig, Void> CONFIG_PARSER =
        new ConstructingObjectParser<>(TYPE_FIELD.getPreferredName(), false,
                a -> new ModelPlotConfig((boolean) a[0], (String) a[1]));
public static final Map<MlParserType, ConstructingObjectParser<ModelPlotConfig, Void>> PARSERS =
        new EnumMap<>(MlParserType.class);

static {
    PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD);
    PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD);
    PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
    PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
    for (MlParserType parserType : MlParserType.values()) {
        ConstructingObjectParser<ModelPlotConfig, Void> parser = PARSERS.get(parserType);
        assert parser != null;
        parser.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD);
        parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD);
    }
}

private final boolean enabled;
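The comment above states the scheme: metadata is parsed leniently so documents written by a newer, enhanced version do not break older nodes, while user-supplied config is parsed strictly so typos are rejected. A minimal sketch of lenient-versus-strict unknown-field handling (plain Java, illustrative names only, not the X-Pack classes):

import java.util.Map;
import java.util.Set;

public class LenientVsStrict {
    // Fields this version of the code understands.
    private static final Set<String> KNOWN_FIELDS = Set.of("enabled", "terms");

    static void parse(Map<String, Object> source, boolean lenient) {
        for (String field : source.keySet()) {
            if (!KNOWN_FIELDS.contains(field)) {
                if (lenient) {
                    continue; // METADATA-style: skip fields written by a newer version
                }
                // CONFIG-style: reject unknown fields so user typos surface immediately
                throw new IllegalArgumentException("unknown field [" + field + "]");
            }
        }
        // ... assign known fields ...
    }

    public static void main(String[] args) {
        Map<String, Object> doc = Map.of("enabled", true, "terms", "foo", "new_field", 42);
        parse(doc, true);                 // tolerated
        try {
            parse(doc, false);            // rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // unknown field [new_field]
        }
    }
}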
@ -15,11 +15,14 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.Objects;

public class RuleCondition implements ToXContentObject, Writeable {
@ -29,21 +32,33 @@ public class RuleCondition implements ToXContentObject, Writeable {
public static final ParseField FIELD_VALUE_FIELD = new ParseField("field_value");
public static final ParseField VALUE_FILTER_FIELD = new ParseField("value_filter");

public static final ConstructingObjectParser<RuleCondition, Void> PARSER =
        new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(),
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
public static final ConstructingObjectParser<RuleCondition, Void> METADATA_PARSER =
        new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), true,
                a -> new RuleCondition((RuleConditionType) a[0], (String) a[1], (String) a[2], (Condition) a[3], (String) a[4]));
public static final ConstructingObjectParser<RuleCondition, Void> CONFIG_PARSER =
        new ConstructingObjectParser<>(RULE_CONDITION_FIELD.getPreferredName(), false,
                a -> new RuleCondition((RuleConditionType) a[0], (String) a[1], (String) a[2], (Condition) a[3], (String) a[4]));
public static final Map<MlParserType, ConstructingObjectParser<RuleCondition, Void>> PARSERS =
        new EnumMap<>(MlParserType.class);

static {
    PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
        if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
            return RuleConditionType.fromString(p.text());
        }
        throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
    }, CONDITION_TYPE_FIELD, ValueType.STRING);
    PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_NAME_FIELD);
    PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_VALUE_FIELD);
    PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Condition.PARSER, Condition.CONDITION_FIELD);
    PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), VALUE_FILTER_FIELD);
    PARSERS.put(MlParserType.METADATA, METADATA_PARSER);
    PARSERS.put(MlParserType.CONFIG, CONFIG_PARSER);
    for (MlParserType parserType : MlParserType.values()) {
        ConstructingObjectParser<RuleCondition, Void> parser = PARSERS.get(parserType);
        assert parser != null;
        parser.declareField(ConstructingObjectParser.constructorArg(), p -> {
            if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
                return RuleConditionType.fromString(p.text());
            }
            throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
        }, CONDITION_TYPE_FIELD, ValueType.STRING);
        parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_NAME_FIELD);
        parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), FIELD_VALUE_FIELD);
        parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), Condition.PARSER, Condition.CONDITION_FIELD);
        parser.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), VALUE_FILTER_FIELD);
    }
}

private final RuleConditionType conditionType;
@ -93,6 +93,7 @@ public final class Messages {
        "Invalid detector rule: at least one rule_condition is required";
public static final String JOB_CONFIG_FIELDNAME_INCOMPATIBLE_FUNCTION = "field_name cannot be used with function ''{0}''";
public static final String JOB_CONFIG_FIELD_VALUE_TOO_LOW = "{0} cannot be less than {1,number}. Value = {2,number}";
public static final String JOB_CONFIG_MODEL_MEMORY_LIMIT_TOO_LOW = "model_memory_limit must be at least 1 MiB. Value = {0,number}";
public static final String JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED =
        "The ''{0}'' function cannot be used in jobs that will take pre-summarized input";
public static final String JOB_CONFIG_FUNCTION_REQUIRES_BYFIELD = "by_field_name must be set when the ''{0}'' function is used";
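These message constants are java.text.MessageFormat patterns, which is why literal apostrophes are doubled ('' produces ') and numbers are written {n,number}. A quick demonstration:

import java.text.MessageFormat;

public class MessageDemo {
    public static void main(String[] args) {
        // {0,number} formats the argument as a number.
        String pattern = "model_memory_limit must be at least 1 MiB. Value = {0,number}";
        System.out.println(MessageFormat.format(pattern, -1));
        // -> model_memory_limit must be at least 1 MiB. Value = -1

        // In a MessageFormat pattern a literal apostrophe must be written '',
        // otherwise it would begin a quoted (uninterpreted) section.
        String quoted = "field_name cannot be used with function ''{0}''";
        System.out.println(MessageFormat.format(quoted, "metric"));
        // -> field_name cannot be used with function 'metric'
    }
}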
@ -33,16 +33,13 @@ public class AnalysisLimitsWriter {

public void write() throws IOException {
    StringBuilder contents = new StringBuilder(MEMORY_STANZA_STR).append(NEW_LINE);
    if (limits.getModelMemoryLimit() != null && limits.getModelMemoryLimit() != 0L) {
        contents.append(MODEL_MEMORY_LIMIT_CONFIG_STR + EQUALS)
                .append(limits.getModelMemoryLimit()).append(NEW_LINE);
    if (limits.getModelMemoryLimit() != null) {
        contents.append(MODEL_MEMORY_LIMIT_CONFIG_STR + EQUALS).append(limits.getModelMemoryLimit()).append(NEW_LINE);
    }

    contents.append(RESULTS_STANZA_STR).append(NEW_LINE);
    if (limits.getCategorizationExamplesLimit() != null) {
        contents.append(MAX_EXAMPLES_LIMIT_CONFIG_STR + EQUALS)
                .append(limits.getCategorizationExamplesLimit())
                .append(NEW_LINE);
        contents.append(MAX_EXAMPLES_LIMIT_CONFIG_STR + EQUALS).append(limits.getCategorizationExamplesLimit()).append(NEW_LINE);
    }

    writer.write(contents.toString());
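Note the dropped getModelMemoryLimit() != 0L guard: with the new size-string parsing a limit below 1 MiB is rejected up front, so zero can no longer reach the writer. A minimal sketch of the stanza-style output this writer builds (the stanza and key names below are assumed placeholders for illustration, not the actual constants in AnalysisLimitsWriter):

import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

public class LimitsWriterSketch {
    // Assumed placeholder values; the real constants live in AnalysisLimitsWriter.
    private static final String MEMORY_STANZA_STR = "[memory]";
    private static final String RESULTS_STANZA_STR = "[results]";
    private static final String MODEL_MEMORY_LIMIT_CONFIG_STR = "modelmemorylimit";
    private static final String MAX_EXAMPLES_LIMIT_CONFIG_STR = "maxexamples";
    private static final char EQUALS = '=';
    private static final char NEW_LINE = '\n';

    static void write(Long modelMemoryLimitMb, Long examplesLimit, Writer writer) throws IOException {
        StringBuilder contents = new StringBuilder(MEMORY_STANZA_STR).append(NEW_LINE);
        if (modelMemoryLimitMb != null) { // no zero check: values < 1 MiB are rejected at parse time
            contents.append(MODEL_MEMORY_LIMIT_CONFIG_STR).append(EQUALS).append(modelMemoryLimitMb).append(NEW_LINE);
        }
        contents.append(RESULTS_STANZA_STR).append(NEW_LINE);
        if (examplesLimit != null) {
            contents.append(MAX_EXAMPLES_LIMIT_CONFIG_STR).append(EQUALS).append(examplesLimit).append(NEW_LINE);
        }
        writer.write(contents.toString());
    }

    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        write(1024L, 4L, out);
        System.out.print(out); // [memory] / modelmemorylimit=1024 / [results] / maxexamples=4
    }
}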
@ -154,6 +154,7 @@ import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServe
import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4Transport;
import org.elasticsearch.xpack.security.user.AnonymousUser;
import org.elasticsearch.xpack.ssl.SSLBootstrapCheck;
import org.elasticsearch.xpack.ssl.SSLConfigurationSettings;
import org.elasticsearch.xpack.ssl.SSLService;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@ -179,6 +180,7 @@ import java.util.stream.Collectors;

import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.elasticsearch.common.settings.Setting.groupSetting;
import static org.elasticsearch.xpack.XPackSettings.HTTP_SSL_ENABLED;

public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
@ -463,6 +465,8 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin {
        settingsList.add(TokenService.TOKEN_PASSPHRASE);
        settingsList.add(TokenService.DELETE_INTERVAL);
        settingsList.add(TokenService.DELETE_TIMEOUT);
        settingsList.add(SecurityServerTransportInterceptor.TRANSPORT_TYPE_PROFILE_SETTING);
        settingsList.addAll(SSLConfigurationSettings.getProfileSettings());

        // hide settings
        settingsList.add(Setting.listSetting(setting("hide_settings"), Collections.emptyList(), Function.identity(),
@ -50,6 +50,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {

public static final SecureString EMPTY_PASSWORD_TEXT = new SecureString("".toCharArray());
static final char[] EMPTY_PASSWORD_HASH = Hasher.BCRYPT.hash(EMPTY_PASSWORD_TEXT);
static final char[] OLD_DEFAULT_PASSWORD_HASH = Hasher.BCRYPT.hash(new SecureString("changeme".toCharArray()));

private static final ReservedUserInfo DEFAULT_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, true, true);
private static final ReservedUserInfo DISABLED_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, false, true);
@ -102,9 +103,20 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
        Runnable action;
        if (userInfo != null) {
            try {
                if (userInfo.hasEmptyPassword && isSetupMode(token.principal(), acceptEmptyPassword) == false) {
                    action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]",
                            token.principal()));
                if (userInfo.hasEmptyPassword) {
                    // norelease
                    // Accepting the OLD_DEFAULT_PASSWORD_HASH is a transition step. We do not want to support
                    // this in a release.
                    if (isSetupMode(token.principal(), acceptEmptyPassword) == false) {
                        action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]",
                                token.principal()));
                    } else if (verifyPassword(userInfo, token)
                            || Hasher.BCRYPT.verify(token.credentials(), OLD_DEFAULT_PASSWORD_HASH)) {
                        action = () -> listener.onResponse(getUser(token.principal(), userInfo));
                    } else {
                        action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]",
                                token.principal()));
                    }
                } else if (verifyPassword(userInfo, token)) {
                    final User user = getUser(token.principal(), userInfo);
                    action = () -> listener.onResponse(user);
@ -113,7 +125,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
                            token.principal()));
                }
            } finally {
                if (userInfo.passwordHash != EMPTY_PASSWORD_HASH) {
                if (userInfo.passwordHash != EMPTY_PASSWORD_HASH && userInfo.passwordHash != OLD_DEFAULT_PASSWORD_HASH) {
                    Arrays.fill(userInfo.passwordHash, (char) 0);
                }
            }
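The intent of the hunk: while the reserved user still has an empty (unset) password, setup mode additionally accepts the old 5.x default password "changeme" so existing tooling keeps working during the upgrade window; the norelease marker flags that this acceptance must be removed before shipping. A condensed sketch of the decision flow (plain Java; verify stands in for the bcrypt check, and the literals are illustrative):

import java.util.function.BiPredicate;

public class SetupModeAuthSketch {
    enum Outcome { SUCCESS, FAILURE }

    // verify.test(candidate, stored) stands in for Hasher.BCRYPT.verify(...)
    static Outcome authenticate(boolean hasEmptyPassword, boolean setupMode,
                                String candidate, BiPredicate<String, String> verify) {
        if (hasEmptyPassword) {
            if (!setupMode) {
                return Outcome.FAILURE;                      // empty password only usable in setup mode
            }
            if (verify.test(candidate, "")                   // empty password accepted...
                    || verify.test(candidate, "changeme")) { // ...and, transitionally, the old default
                return Outcome.SUCCESS;
            }
            return Outcome.FAILURE;
        }
        // normal path: check against the user's real stored hash
        return verify.test(candidate, "<stored-hash>") ? Outcome.SUCCESS : Outcome.FAILURE;
    }
}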
@ -152,11 +152,14 @@ public class AuthorizationService extends AbstractComponent {
            throw denial(authentication, action, request);
        }

        // norelease
        // TODO: This functionality is disabled as it is not yet compatible with the upgrade process
        // If the user is the elastic user in setup mode, then only change password requests can be authorized
        if (ElasticUser.isElasticUserInSetupMode(authentication.getUser())
                && ChangePasswordAction.NAME.equals(action) == false) {
            throw denial(authentication, action, request);
        }
        // if (ElasticUser.isElasticUserInSetupMode(authentication.getUser())
        //         && ChangePasswordAction.NAME.equals(action) == false
        //         && ClusterHealthAction.NAME.equals(action) == false) {
        //     throw denial(authentication, action, request);
        // }

        // get the roles of the authenticated user, which may be different than the effective
        Role permission = userRole;
@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ThreadContext;
@ -44,12 +45,24 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.function.Function;

import static org.elasticsearch.xpack.security.Security.setting;

public class SecurityServerTransportInterceptor extends AbstractComponent implements TransportInterceptor {

    private static final String SETTING_NAME = "xpack.security.type";
    private static final Function<String, Setting<String>> TRANSPORT_TYPE_SETTING_TEMPLATE = (key) -> new Setting<>(key, "node", v -> {
        if (v.equals("node") || v.equals("client")) {
            return v;
        }
        throw new IllegalArgumentException("type must be one of [client, node]");
    }, Setting.Property.NodeScope);
    private static final String TRANSPORT_TYPE_SETTING_KEY = "xpack.security.type";

    public static final Setting<String> TRANSPORT_TYPE_PROFILE_SETTING = Setting.affixKeySetting("transport.profiles.",
            TRANSPORT_TYPE_SETTING_KEY, TRANSPORT_TYPE_SETTING_TEMPLATE);

    private final AuthenticationService authcService;
    private final AuthorizationService authzService;
@ -154,17 +167,20 @@ public class SecurityServerTransportInterceptor extends AbstractComponent implem
            Settings profileSettings = entry.getValue();
            final Settings profileSslSettings = SecurityNetty4Transport.profileSslSettings(profileSettings);
            final boolean extractClientCert = sslService.isSSLClientAuthEnabled(profileSslSettings, transportSSLSettings);
            String type = entry.getValue().get(SETTING_NAME, "node");
            String type = TRANSPORT_TYPE_SETTING_TEMPLATE.apply(TRANSPORT_TYPE_SETTING_KEY).get(entry.getValue());
            switch (type) {
                case "client":
                    profileFilters.put(entry.getKey(), new ServerTransportFilter.ClientProfile(authcService, authzService,
                            threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled,
                            securityContext));
                    break;
                default:
                case "node":
                    profileFilters.put(entry.getKey(), new ServerTransportFilter.NodeProfile(authcService, authzService,
                            threadPool.getThreadContext(), extractClientCert, destructiveOperations, reservedRealmEnabled,
                            securityContext));
                    break;
                default:
                    throw new IllegalStateException("unknown profile type: " + type);
            }
        }
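Replacing the raw entry.getValue().get(SETTING_NAME, "node") lookup with a Setting built from the template means the value is now validated: anything other than "node" or "client" fails fast instead of silently falling into the default branch. A standalone sketch of a validating, defaulted lookup (illustrative types, not the Elasticsearch Setting class):

import java.util.Map;
import java.util.function.Function;

public class ValidatedSettingSketch {
    // Validator mirroring TRANSPORT_TYPE_SETTING_TEMPLATE's lambda: accept the
    // two known values, reject everything else.
    static final Function<String, String> TRANSPORT_TYPE = v -> {
        if (v.equals("node") || v.equals("client")) {
            return v;
        }
        throw new IllegalArgumentException("type must be one of [client, node]");
    };

    static String get(Map<String, String> profileSettings, String key, String defaultValue) {
        return TRANSPORT_TYPE.apply(profileSettings.getOrDefault(key, defaultValue));
    }

    public static void main(String[] args) {
        System.out.println(get(Map.of(), "xpack.security.type", "node"));       // node (default)
        System.out.println(get(Map.of("xpack.security.type", "client"),
                "xpack.security.type", "node"));                                // client
        // get(Map.of("xpack.security.type", "banana"), ...) -> IllegalArgumentException
    }
}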
@ -23,12 +23,13 @@ import org.elasticsearch.xpack.security.audit.AuditTrailService;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.xpack.security.Security.setting;
@ -58,6 +59,13 @@ public class IPFilter {
public static final Setting<List<String>> TRANSPORT_FILTER_DENY_SETTING = Setting.listSetting(setting("transport.filter.deny"),
        Collections.emptyList(), Function.identity(), Property.Dynamic, Property.NodeScope);

public static final Setting.AffixSetting<List<String>> PROFILE_FILTER_DENY_SETTING = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.filter.deny", key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(),
                Property.Dynamic, Property.NodeScope));
public static final Setting.AffixSetting<List<String>> PROFILE_FILTER_ALLOW_SETTING = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.filter.allow", key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(),
                Property.Dynamic, Property.NodeScope));

private static final Setting<List<String>> HTTP_FILTER_ALLOW_FALLBACK =
        Setting.listSetting("transport.profiles.default.xpack.security.filter.allow", TRANSPORT_FILTER_ALLOW_SETTING, s -> s,
                Property.NodeScope);
@ -96,7 +104,7 @@ public class IPFilter {
private volatile Map<String, SecurityIpFilterRule[]> rules = Collections.emptyMap();
private volatile boolean isIpFilterEnabled;
private volatile boolean isHttpFilterEnabled;
private volatile Map<String, Settings> transportGroups;
private final Set<String> profiles;
private volatile List<String> transportAllowFilter;
private volatile List<String> transportDenyFilter;
private volatile List<String> httpAllowFilter;
@ -104,6 +112,8 @@ public class IPFilter {
private final SetOnce<BoundTransportAddress> boundTransportAddress = new SetOnce<>();
private final SetOnce<BoundTransportAddress> boundHttpTransportAddress = new SetOnce<>();
private final SetOnce<Map<String, BoundTransportAddress>> profileBoundAddress = new SetOnce<>();
private final Map<String, List<String>> profileAllowRules = Collections.synchronizedMap(new HashMap<>());
private final Map<String, List<String>> profileDenyRules = Collections.synchronizedMap(new HashMap<>());

public IPFilter(final Settings settings, AuditTrailService auditTrail, ClusterSettings clusterSettings,
        XPackLicenseState licenseState) {
@ -118,15 +128,22 @@ public class IPFilter {
    isHttpFilterEnabled = IP_FILTER_ENABLED_HTTP_SETTING.get(settings);
    isIpFilterEnabled = IP_FILTER_ENABLED_SETTING.get(settings);

    this.transportGroups = TcpTransport.TRANSPORT_PROFILES_SETTING.get(settings).getAsGroups(); // this is pretty crazy that we
    // allow this to be updateable!!! - we have to fix this very soon
    this.profiles = settings.getGroups("transport.profiles.", true).keySet().stream().filter(k -> TcpTransport
            .DEFAULT_PROFILE.equals(k) == false).collect(Collectors.toSet()); // exclude default profile -- it's handled differently
    for (String profile : profiles) {
        Setting<List<String>> allowSetting = PROFILE_FILTER_ALLOW_SETTING.getConcreteSettingForNamespace(profile);
        profileAllowRules.put(profile, allowSetting.get(settings));
        Setting<List<String>> denySetting = PROFILE_FILTER_DENY_SETTING.getConcreteSettingForNamespace(profile);
        profileDenyRules.put(profile, denySetting.get(settings));
    }
    clusterSettings.addSettingsUpdateConsumer(IP_FILTER_ENABLED_HTTP_SETTING, this::setHttpFiltering);
    clusterSettings.addSettingsUpdateConsumer(IP_FILTER_ENABLED_SETTING, this::setTransportFiltering);
    clusterSettings.addSettingsUpdateConsumer(TRANSPORT_FILTER_ALLOW_SETTING, this::setTransportAllowFilter);
    clusterSettings.addSettingsUpdateConsumer(TRANSPORT_FILTER_DENY_SETTING, this::setTransportDenyFilter);
    clusterSettings.addSettingsUpdateConsumer(HTTP_FILTER_ALLOW_SETTING, this::setHttpAllowFilter);
    clusterSettings.addSettingsUpdateConsumer(HTTP_FILTER_DENY_SETTING, this::setHttpDenyFilter);
    clusterSettings.addSettingsUpdateConsumer(TcpTransport.TRANSPORT_PROFILES_SETTING, this::setTransportProfiles);
    clusterSettings.addAffixUpdateConsumer(PROFILE_FILTER_ALLOW_SETTING, this::setProfileAllowRules, (a, b) -> {});
    clusterSettings.addAffixUpdateConsumer(PROFILE_FILTER_DENY_SETTING, this::setProfileDenyRules, (a, b) -> {});
    updateRules();
}

@ -140,8 +157,13 @@ public class IPFilter {
    return map;
}

private void setTransportProfiles(Settings settings) {
    transportGroups = settings.getAsGroups();
private void setProfileAllowRules(String profile, List<String> rules) {
    profileAllowRules.put(profile, rules);
    updateRules();
}

private void setProfileDenyRules(String profile, List<String> rules) {
    profileDenyRules.put(profile, rules);
    updateRules();
}

@ -215,18 +237,17 @@ public class IPFilter {

    if (isIpFilterEnabled && boundTransportAddress.get() != null) {
        TransportAddress[] localAddresses = boundTransportAddress.get().boundAddresses();
        profileRules.put("default", createRules(transportAllowFilter, transportDenyFilter, localAddresses));
        for (Map.Entry<String, Settings> entry : transportGroups.entrySet()) {
            String profile = entry.getKey();
        profileRules.put(TcpTransport.DEFAULT_PROFILE, createRules(transportAllowFilter, transportDenyFilter, localAddresses));
        for (String profile : profiles) {
            BoundTransportAddress profileBoundTransportAddress = profileBoundAddress.get().get(profile);
            if (profileBoundTransportAddress == null) {
                // this could happen if a user updates the settings dynamically with a new profile
                logger.warn("skipping ip filter rules for profile [{}] since the profile is not bound to any addresses", profile);
                continue;
            }
            Settings profileSettings = entry.getValue().getByPrefix(setting("filter."));
            profileRules.put(profile, createRules(Arrays.asList(profileSettings.getAsArray("allow")),
                    Arrays.asList(profileSettings.getAsArray("deny")), profileBoundTransportAddress.boundAddresses()));
            final List<String> allowRules = this.profileAllowRules.getOrDefault(profile, Collections.emptyList());
            final List<String> denyRules = this.profileDenyRules.getOrDefault(profile, Collections.emptyList());
            profileRules.put(profile, createRules(allowRules, denyRules, profileBoundTransportAddress.boundAddresses()));
        }
    }

@ -277,5 +298,7 @@ public class IPFilter {
    settings.add(HTTP_FILTER_DENY_SETTING);
    settings.add(TRANSPORT_FILTER_ALLOW_SETTING);
    settings.add(TRANSPORT_FILTER_DENY_SETTING);
    settings.add(PROFILE_FILTER_ALLOW_SETTING);
    settings.add(PROFILE_FILTER_DENY_SETTING);
}
}
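With these affix settings in place, per-profile IP filter rules are read from keys of the form transport.profiles.<profile>.xpack.security.filter.allow|deny and can be updated dynamically. A hypothetical elasticsearch.yml fragment (the profile name client_traffic and the addresses are invented for illustration):

transport.profiles.client_traffic.port: 9500-9600
transport.profiles.client_traffic.xpack.security.filter.allow: 192.168.0.0/24
transport.profiles.client_traffic.xpack.security.filter.deny: _all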
@ -82,12 +82,12 @@ public class SecurityNetty4Transport extends Netty4Transport {
    }

    @Override
    protected ChannelHandler getServerChannelInitializer(String name, Settings settings) {
    protected ChannelHandler getServerChannelInitializer(String name) {
        SSLConfiguration configuration = profileConfiguration.get(name);
        if (configuration == null) {
            throw new IllegalStateException("unknown profile: " + name);
        }
        return new SecurityServerChannelInitializer(settings, name, configuration);
        return new SecurityServerChannelInitializer(name, configuration);
    }

    @Override
@ -130,8 +130,8 @@ public class SecurityNetty4Transport extends Netty4Transport {
    class SecurityServerChannelInitializer extends ServerChannelInitializer {
        private final SSLConfiguration configuration;

        SecurityServerChannelInitializer(Settings settings, String name, SSLConfiguration configuration) {
            super(name, settings);
        SecurityServerChannelInitializer(String name, SSLConfiguration configuration) {
            super(name);
            this.configuration = configuration;

        }
@ -7,8 +7,9 @@ package org.elasticsearch.xpack.ssl;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.util.ArrayList;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
@ -50,6 +51,108 @@ public class SSLConfigurationSettings {

private final List<Setting<?>> allSettings;

private static final Function<String, Setting<List<String>>> CIPHERS_SETTING_TEMPLATE = key -> Setting.listSetting(key,
        Collections.emptyList(), Function.identity(), Property.NodeScope, Property.Filtered);
public static final Setting<List<String>> CIPHERS_SETTING_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.cipher_suites", CIPHERS_SETTING_TEMPLATE);

private static final Function<String, Setting<List<String>>> SUPPORTED_PROTOCOLS_TEMPLATE = key -> Setting.listSetting(key,
        Collections.emptyList(), Function.identity(), Property.NodeScope, Property.Filtered);
public static final Setting<List<String>> SUPPORTED_PROTOCOLS_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.supported_protocols", SUPPORTED_PROTOCOLS_TEMPLATE);

private static final Function<String, Setting<Optional<String>>> KEYSTORE_PATH_TEMPLATE = key -> new Setting<>(key, s -> null,
        Optional::ofNullable, Property.NodeScope, Property.Filtered);
public static final Setting<Optional<String>> KEYSTORE_PATH_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.keystore.path", KEYSTORE_PATH_TEMPLATE);

private static final Function<String, Setting<SecureString>> LEGACY_KEYSTORE_PASSWORD_TEMPLATE = key -> new Setting<>(key, "",
        SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope);
public static final Setting<SecureString> LEGACY_KEYSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.keystore.password", LEGACY_KEYSTORE_PASSWORD_TEMPLATE);

private static final Function<String, Setting<SecureString>> KEYSTORE_PASSWORD_TEMPLATE = key -> SecureSetting.secureString(key,
        LEGACY_KEYSTORE_PASSWORD_TEMPLATE.apply(key.replace("keystore.secure_password", "keystore.password")));
public static final Setting<SecureString> KEYSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.keystore.secure_password", KEYSTORE_PASSWORD_TEMPLATE);

private static final Function<String, Setting<SecureString>> LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE = key -> new Setting<>(key, "",
        SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope);
public static final Setting<SecureString> LEGACY_KEYSTORE_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.keystore.key_password", LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE);

private static final Function<String, Setting<SecureString>> KEYSTORE_KEY_PASSWORD_TEMPLATE = key ->
        SecureSetting.secureString(key, LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE.apply(key.replace("keystore.secure_key_password",
                "keystore.key_password")));
public static final Setting<SecureString> KEYSTORE_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.keystore.secure_key_password", KEYSTORE_KEY_PASSWORD_TEMPLATE);

private static final Function<String, Setting<Optional<String>>> TRUST_STORE_PATH_TEMPLATE = key -> new Setting<>(key, s -> null,
        Optional::ofNullable, Property.NodeScope, Property.Filtered);
public static final Setting<Optional<String>> TRUST_STORE_PATH_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.truststore.path", TRUST_STORE_PATH_TEMPLATE);

private static final Function<String, Setting<Optional<String>>> KEY_PATH_TEMPLATE = key -> new Setting<>(key, s -> null,
        Optional::ofNullable, Property.NodeScope, Property.Filtered);
public static final Setting<Optional<String>> KEY_PATH_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.key", KEY_PATH_TEMPLATE);

private static final Function<String, Setting<SecureString>> LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE = key ->
        new Setting<>(key, "", SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope);
public static final Setting<SecureString> LEGACY_TRUSTSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.truststore.password", LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE);

private static final Function<String, Setting<SecureString>> TRUSTSTORE_PASSWORD_TEMPLATE = key ->
        SecureSetting.secureString(key, LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE.apply(key.replace("truststore.secure_password",
                "truststore.password")));
public static final Setting<SecureString> TRUSTSTORE_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.truststore.secure_password", TRUSTSTORE_PASSWORD_TEMPLATE);

private static final Function<String, Setting<String>> KEY_STORE_ALGORITHM_TEMPLATE = key ->
        new Setting<>(key, s -> KeyManagerFactory.getDefaultAlgorithm(),
                Function.identity(), Property.NodeScope, Property.Filtered);
public static final Setting<String> KEY_STORE_ALGORITHM_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.keystore.algorithm", KEY_STORE_ALGORITHM_TEMPLATE);

private static final Function<String, Setting<String>> TRUST_STORE_ALGORITHM_TEMPLATE = key ->
        new Setting<>(key, s -> TrustManagerFactory.getDefaultAlgorithm(),
                Function.identity(), Property.NodeScope, Property.Filtered);
public static final Setting<String> TRUST_STORE_ALGORITHM_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.truststore.algorithm", TRUST_STORE_ALGORITHM_TEMPLATE);

private static final Function<String, Setting<SecureString>> LEGACY_KEY_PASSWORD_TEMPLATE = key -> new Setting<>(key, "",
        SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope);
public static final Setting<SecureString> LEGACY_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.key_passphrase", LEGACY_KEY_PASSWORD_TEMPLATE);

private static final Function<String, Setting<SecureString>> KEY_PASSWORD_TEMPLATE = key ->
        SecureSetting.secureString(key, LEGACY_KEY_PASSWORD_TEMPLATE.apply(key.replace("secure_key_passphrase",
                "key_passphrase")));
public static final Setting<SecureString> KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.secure_key_passphrase", KEY_PASSWORD_TEMPLATE);

private static final Function<String, Setting<Optional<String>>> CERT_TEMPLATE = key -> new Setting<>(key, s -> null,
        Optional::ofNullable, Property.NodeScope, Property.Filtered);
public static final Setting<Optional<String>> CERT_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.certificate", CERT_TEMPLATE);

private static final Function<String, Setting<List<String>>> CAPATH_SETTING_TEMPLATE = key -> Setting.listSetting(key,
        Collections.emptyList(), Function.identity(), Property.NodeScope, Property.Filtered);
public static final Setting<List<String>> CAPATH_SETTING_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.certificate_authorities", CAPATH_SETTING_TEMPLATE);

private static final Function<String, Setting<Optional<SSLClientAuth>>> CLIENT_AUTH_SETTING_TEMPLATE =
        key -> new Setting<>(key, (String) null, s -> s == null ? Optional.empty() : Optional.of(SSLClientAuth.parse(s)),
                Property.NodeScope, Property.Filtered);
public static final Setting<Optional<SSLClientAuth>> CLIENT_AUTH_SETTING_PROFILES = Setting.affixKeySetting("transport.profiles.",
        "xpack.security.ssl.client_authentication", CLIENT_AUTH_SETTING_TEMPLATE);

private static final Function<String, Setting<Optional<VerificationMode>>> VERIFICATION_MODE_SETTING_TEMPLATE =
        key -> new Setting<>(key, (String) null, s -> s == null ? Optional.empty() : Optional.of(VerificationMode.parse(s)),
                Property.NodeScope, Property.Filtered);
public static final Setting<Optional<VerificationMode>> VERIFICATION_MODE_SETTING_PROFILES = Setting.affixKeySetting(
        "transport.profiles.", "xpack.security.ssl.verification_mode", VERIFICATION_MODE_SETTING_TEMPLATE);

/**
 * @see #withoutPrefix
 * @see #withPrefix
@ -58,38 +161,25 @@ public class SSLConfigurationSettings {
 */
private SSLConfigurationSettings(String prefix) {
    assert prefix != null : "Prefix cannot be null (but can be blank)";

    ciphers = Setting.listSetting(prefix + "cipher_suites", Collections.emptyList(), Function.identity(),
            Property.NodeScope, Property.Filtered);
    supportedProtocols = Setting.listSetting(prefix + "supported_protocols", Collections.emptyList(), Function.identity(),
            Property.NodeScope, Property.Filtered);
    keystorePath = new Setting<>(prefix + "keystore.path", s -> null, Optional::ofNullable,
            Property.NodeScope, Property.Filtered);
    legacyKeystorePassword = new Setting<>(prefix + "keystore.password", "", SecureString::new,
            Property.Deprecated, Property.Filtered, Property.NodeScope);
    keystorePassword = SecureSetting.secureString(prefix + "keystore.secure_password", legacyKeystorePassword);
    legacyKeystoreKeyPassword = new Setting<>(prefix + "keystore.key_password", "",
            SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope);
    keystoreKeyPassword = SecureSetting.secureString(prefix + "keystore.secure_key_password", legacyKeystoreKeyPassword);
    truststorePath = new Setting<>(prefix + "truststore.path", s -> null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    legacyTruststorePassword = new Setting<>(prefix + "truststore.password", "", SecureString::new,
            Property.Deprecated, Property.Filtered, Property.NodeScope);
    truststorePassword = SecureSetting.secureString(prefix + "truststore.secure_password", legacyTruststorePassword);
    keystoreAlgorithm = new Setting<>(prefix + "keystore.algorithm", s -> KeyManagerFactory.getDefaultAlgorithm(),
            Function.identity(), Property.NodeScope, Property.Filtered);
    truststoreAlgorithm = new Setting<>(prefix + "truststore.algorithm", s -> TrustManagerFactory.getDefaultAlgorithm(),
            Function.identity(), Property.NodeScope, Property.Filtered);
    keyPath = new Setting<>(prefix + "key", s -> null, Optional::ofNullable, Setting.Property.NodeScope, Setting.Property.Filtered);
    legacyKeyPassword = new Setting<>(prefix + "key_passphrase", "", SecureString::new,
            Property.Deprecated, Property.Filtered, Property.NodeScope);
    keyPassword = SecureSetting.secureString(prefix + "secure_key_passphrase", legacyKeyPassword);
    cert = new Setting<>(prefix + "certificate", s -> null, Optional::ofNullable, Property.NodeScope, Property.Filtered);
    caPaths = Setting.listSetting(prefix + "certificate_authorities", Collections.emptyList(), Function.identity(),
            Property.NodeScope, Property.Filtered);
    clientAuth = new Setting<>(prefix + "client_authentication", (String) null,
            s -> s == null ? Optional.empty() : Optional.of(SSLClientAuth.parse(s)), Property.NodeScope, Property.Filtered);
    verificationMode = new Setting<>(prefix + "verification_mode", (String) null,
            s -> s == null ? Optional.empty() : Optional.of(VerificationMode.parse(s)), Property.NodeScope, Property.Filtered);
    ciphers = CIPHERS_SETTING_TEMPLATE.apply(prefix + "cipher_suites");
    supportedProtocols = SUPPORTED_PROTOCOLS_TEMPLATE.apply(prefix + "supported_protocols");
    keystorePath = KEYSTORE_PATH_TEMPLATE.apply(prefix + "keystore.path");
    legacyKeystorePassword = LEGACY_KEYSTORE_PASSWORD_TEMPLATE.apply(prefix + "keystore.password");
    keystorePassword = KEYSTORE_PASSWORD_TEMPLATE.apply(prefix + "keystore.secure_password");
    legacyKeystoreKeyPassword = LEGACY_KEYSTORE_KEY_PASSWORD_TEMPLATE.apply(prefix + "keystore.key_password");
    keystoreKeyPassword = KEYSTORE_KEY_PASSWORD_TEMPLATE.apply(prefix + "keystore.secure_key_password");
    truststorePath = TRUST_STORE_PATH_TEMPLATE.apply(prefix + "truststore.path");
    legacyTruststorePassword = LEGACY_TRUSTSTORE_PASSWORD_TEMPLATE.apply(prefix + "truststore.password");
    truststorePassword = TRUSTSTORE_PASSWORD_TEMPLATE.apply(prefix + "truststore.secure_password");
    keystoreAlgorithm = KEY_STORE_ALGORITHM_TEMPLATE.apply(prefix + "keystore.algorithm");
    truststoreAlgorithm = TRUST_STORE_ALGORITHM_TEMPLATE.apply(prefix + "truststore.algorithm");
    keyPath = KEY_PATH_TEMPLATE.apply(prefix + "key");
    legacyKeyPassword = LEGACY_KEY_PASSWORD_TEMPLATE.apply(prefix + "key_passphrase");
    keyPassword = KEY_PASSWORD_TEMPLATE.apply(prefix + "secure_key_passphrase");
    cert = CERT_TEMPLATE.apply(prefix + "certificate");
    caPaths = CAPATH_SETTING_TEMPLATE.apply(prefix + "certificate_authorities");
    clientAuth = CLIENT_AUTH_SETTING_TEMPLATE.apply(prefix + "client_authentication");
    verificationMode = VERIFICATION_MODE_SETTING_TEMPLATE.apply(prefix + "verification_mode");

    this.allSettings = Arrays.asList(ciphers, supportedProtocols, keystorePath, keystorePassword, keystoreAlgorithm,
            keystoreKeyPassword, truststorePath, truststorePassword, truststoreAlgorithm, keyPath, keyPassword, cert, caPaths,
@ -117,4 +207,14 @@ public class SSLConfigurationSettings {
    assert prefix.endsWith("ssl.") : "The ssl config prefix (" + prefix + ") should end in 'ssl.'";
    return new SSLConfigurationSettings(prefix);
}

public static Collection<Setting<?>> getProfileSettings() {
    return Arrays.asList(CIPHERS_SETTING_PROFILES, SUPPORTED_PROTOCOLS_PROFILES, KEYSTORE_PATH_PROFILES,
            LEGACY_KEYSTORE_PASSWORD_PROFILES, KEYSTORE_PASSWORD_PROFILES, LEGACY_KEYSTORE_KEY_PASSWORD_PROFILES,
            KEYSTORE_KEY_PASSWORD_PROFILES, TRUST_STORE_PATH_PROFILES, LEGACY_TRUSTSTORE_PASSWORD_PROFILES,
            TRUSTSTORE_PASSWORD_PROFILES, KEY_STORE_ALGORITHM_PROFILES, TRUST_STORE_ALGORITHM_PROFILES, KEY_PATH_PROFILES,
            LEGACY_KEY_PASSWORD_PROFILES, KEY_PASSWORD_PROFILES, CERT_PROFILES, CAPATH_SETTING_PROFILES,
            CLIENT_AUTH_SETTING_PROFILES, VERIFICATION_MODE_SETTING_PROFILES);
}
}
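The refactoring idea in this file: each SSL setting is defined once as a Function<String, Setting<?>> keyed by the final key, so the same definition can be instantiated both under an ssl.-style prefix and as a transport.profiles.* affix setting. A stripped-down sketch of the key-template pattern (the SettingDef record is an illustrative stand-in, not the Elasticsearch Setting class, and the profile name is invented):

import java.util.List;
import java.util.function.Function;

public class SettingTemplateSketch {
    // A setting definition: just a fully-qualified key plus a default here.
    record SettingDef(String key, String defaultValue) {}

    // The template captures everything about the setting except its key prefix.
    static final Function<String, SettingDef> VERIFICATION_MODE_TEMPLATE =
            key -> new SettingDef(key, "full");

    public static void main(String[] args) {
        // The same definition reused under two different prefixes, as in
        // SSLConfigurationSettings(prefix) vs. the *_PROFILES affix settings.
        SettingDef node = VERIFICATION_MODE_TEMPLATE.apply("xpack.ssl.verification_mode");
        SettingDef profile = VERIFICATION_MODE_TEMPLATE.apply(
                "transport.profiles.client_traffic.xpack.security.ssl.verification_mode");
        System.out.println(List.of(node.key(), profile.key()));
    }
}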
@ -16,7 +16,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.transport.TcpTransport;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.common.socket.SocketAccess;
import org.elasticsearch.xpack.security.Security;
@ -852,7 +851,7 @@ public class SSLService extends AbstractComponent {

    private static List<Settings> getTransportProfileSSLSettings(Settings settings) {
        List<Settings> sslSettings = new ArrayList<>();
        Map<String, Settings> profiles = TcpTransport.TRANSPORT_PROFILES_SETTING.get(settings).getAsGroups(true);
        Map<String, Settings> profiles = settings.getGroups("transport.profiles.", true);
        for (Entry<String, Settings> entry : profiles.entrySet()) {
            Settings profileSettings = entry.getValue().getByPrefix("xpack.security.ssl.");
            if (profileSettings.isEmpty() == false) {
@ -79,7 +79,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {

    @Override
    protected MlMetadata doParseInstance(XContentParser parser) {
        return MlMetadata.ML_METADATA_PARSER.apply(parser, null).build();
        return MlMetadata.METADATA_PARSER.apply(parser, null).build();
    }

    @Override
@ -29,7 +29,7 @@ public class ChunkingConfigTests extends AbstractSerializingTestCase<ChunkingCon

    @Override
    protected ChunkingConfig doParseInstance(XContentParser parser) {
        return ChunkingConfig.PARSER.apply(parser, null);
        return ChunkingConfig.CONFIG_PARSER.apply(parser, null);
    }

    public void testConstructorGivenAutoAndTimeSpan() {
@ -63,4 +63,4 @@ public class ChunkingConfigTests extends AbstractSerializingTestCase<ChunkingCon
    private static TimeValue randomPositiveSecondsMinutesHours() {
        return new TimeValue(randomIntBetween(1, 1000), randomFrom(Arrays.asList(TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS)));
    }
}
}
@ -12,7 +12,9 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -117,7 +119,29 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon

    @Override
    protected DatafeedConfig doParseInstance(XContentParser parser) {
        return DatafeedConfig.PARSER.apply(parser, null).build();
        return DatafeedConfig.CONFIG_PARSER.apply(parser, null).build();
    }

    private static final String FUTURE_DATAFEED = "{\n" +
            "    \"datafeed_id\": \"farequote-datafeed\",\n" +
            "    \"job_id\": \"farequote\",\n" +
            "    \"frequency\": \"1h\",\n" +
            "    \"indices\": [\"farequote1\", \"farequote2\"],\n" +
            "    \"tomorrows_technology_today\": \"amazing\",\n" +
            "    \"scroll_size\": 1234\n" +
            "}";

    public void testFutureConfigParse() throws IOException {
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_DATAFEED);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> DatafeedConfig.CONFIG_PARSER.apply(parser, null).build());
        assertEquals("[datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage());
    }

    public void testFutureMetadataParse() throws IOException {
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_DATAFEED);
        // Unlike the config version of this test, the metadata parser should tolerate the unknown future field
        assertNotNull(DatafeedConfig.METADATA_PARSER.apply(parser, null).build());
    }

    public void testCopyConstructor() {
@ -87,7 +87,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon

    @Override
    protected AnalysisConfig doParseInstance(XContentParser parser) {
        return AnalysisConfig.PARSER.apply(parser, null).build();
        return AnalysisConfig.CONFIG_PARSER.apply(parser, null).build();
    }

    public void testFieldConfiguration_singleDetector_notPreSummarised() {
@ -6,16 +6,32 @@
package org.elasticsearch.xpack.ml.job.config;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.ml.job.messages.Messages;

import java.io.IOException;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLimits> {

    @Override
    protected AnalysisLimits createTestInstance() {
        return new AnalysisLimits(randomBoolean() ? randomLong() : null, randomBoolean() ? randomNonNegativeLong() : null);
        return createRandomized();
    }

    public static AnalysisLimits createRandomized() {
        return new AnalysisLimits(randomBoolean() ? (long) randomIntBetween(1, 1000000) : null,
                randomBoolean() ? randomNonNegativeLong() : null);
    }

    @Override
@ -25,7 +41,69 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim

    @Override
    protected AnalysisLimits doParseInstance(XContentParser parser) {
        return AnalysisLimits.PARSER.apply(parser, null);
        return AnalysisLimits.CONFIG_PARSER.apply(parser, null);
    }

    public void testParseModelMemoryLimitGivenNegativeNumber() throws IOException {
        String json = "{\"model_memory_limit\": -1}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null));
        assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = -1"));
    }

    public void testParseModelMemoryLimitGivenZero() throws IOException {
        String json = "{\"model_memory_limit\": 0}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null));
        assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0"));
    }

    public void testParseModelMemoryLimitGivenPositiveNumber() throws IOException {
        String json = "{\"model_memory_limit\": 2048}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);

        AnalysisLimits limits = AnalysisLimits.CONFIG_PARSER.apply(parser, null);

        assertThat(limits.getModelMemoryLimit(), equalTo(2048L));
    }

    public void testParseModelMemoryLimitGivenNegativeString() throws IOException {
        String json = "{\"model_memory_limit\":\"-4MB\"}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null));
        assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = -4"));
    }

    public void testParseModelMemoryLimitGivenZeroString() throws IOException {
        String json = "{\"model_memory_limit\":\"0MB\"}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null));
        assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0"));
    }

    public void testParseModelMemoryLimitGivenLessThanOneMBString() throws IOException {
        String json = "{\"model_memory_limit\":\"1000Kb\"}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException e = expectThrows(ParsingException.class, () -> AnalysisLimits.CONFIG_PARSER.apply(parser, null));
        assertThat(e.getRootCause().getMessage(), containsString("model_memory_limit must be at least 1 MiB. Value = 0"));
    }

    public void testParseModelMemoryLimitGivenStringMultipleOfMBs() throws IOException {
        String json = "{\"model_memory_limit\":\"4g\"}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);

        AnalysisLimits limits = AnalysisLimits.CONFIG_PARSER.apply(parser, null);

        assertThat(limits.getModelMemoryLimit(), equalTo(4096L));
    }

    public void testParseModelMemoryLimitGivenStringNonMultipleOfMBs() throws IOException {
        String json = "{\"model_memory_limit\":\"1300kb\"}";
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, json);

        AnalysisLimits limits = AnalysisLimits.CONFIG_PARSER.apply(parser, null);

        assertThat(limits.getModelMemoryLimit(), equalTo(1L));
    }

    public void testEquals_GivenEqual() {
@ -71,7 +149,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim
    }

    public void testVerify_GivenValid() {
        new AnalysisLimits(0L, 0L);
        new AnalysisLimits(null, 1L);
        new AnalysisLimits(1L, null);
        new AnalysisLimits(1L, 1L);
    }
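The expected values in these tests follow from converting the parsed size to whole megabytes with integer division: "4g" becomes 4096, "1300kb" truncates to 1, and anything under 1 MiB truncates to 0 and is rejected (so "1000Kb" reports Value = 0 and "-4MB" reports Value = -4). A small sketch of that arithmetic (parseToBytes is a simplified stand-in for ByteSizeValue parsing):

public class MemoryLimitMath {
    static final long KB = 1024, MB = 1024 * 1024, GB = 1024L * 1024 * 1024;

    // Simplified stand-in for ByteSizeValue parsing of "<value><unit>" strings.
    static long parseToBytes(long value, String unit) {
        switch (unit) {
            case "kb": return value * KB;
            case "mb": return value * MB;
            case "g":  return value * GB;
            default: throw new IllegalArgumentException("unknown unit [" + unit + "]");
        }
    }

    static long toMegabytes(long bytes) {
        long mb = bytes / MB;             // integer division truncates toward zero
        if (mb < 1) {
            throw new IllegalArgumentException("model_memory_limit must be at least 1 MiB. Value = " + mb);
        }
        return mb;
    }

    public static void main(String[] args) {
        System.out.println(toMegabytes(parseToBytes(4, "g")));      // 4096
        System.out.println(toMegabytes(parseToBytes(1300, "kb")));  // 1
        // toMegabytes(parseToBytes(1000, "kb")) -> rejected: "... Value = 0"
    }
}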
@ -200,7 +200,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
        BytesArray json = new BytesArray("{ \"format\":\"INEXISTENT_FORMAT\" }");
        XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException ex = expectThrows(ParsingException.class,
                () -> DataDescription.PARSER.apply(parser, null));
                () -> DataDescription.CONFIG_PARSER.apply(parser, null));
        assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [format]"));
        Throwable cause = ex.getCause();
        assertNotNull(cause);
@ -213,7 +213,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
        BytesArray json = new BytesArray("{ \"field_delimiter\":\",,\" }");
        XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException ex = expectThrows(ParsingException.class,
                () -> DataDescription.PARSER.apply(parser, null));
                () -> DataDescription.CONFIG_PARSER.apply(parser, null));
        assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [field_delimiter]"));
        Throwable cause = ex.getCause();
        assertNotNull(cause);
@ -226,7 +226,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
        BytesArray json = new BytesArray("{ \"quote_character\":\"''\" }");
        XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, json);
        ParsingException ex = expectThrows(ParsingException.class,
                () -> DataDescription.PARSER.apply(parser, null));
                () -> DataDescription.CONFIG_PARSER.apply(parser, null));
        assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [quote_character]"));
        Throwable cause = ex.getCause();
        assertNotNull(cause);
@ -270,6 +270,6 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri

    @Override
    protected DataDescription doParseInstance(XContentParser parser) {
        return DataDescription.PARSER.apply(parser, null).build();
        return DataDescription.CONFIG_PARSER.apply(parser, null).build();
    }
}

@ -119,6 +119,6 @@ public class DetectionRuleTests extends AbstractSerializingTestCase<DetectionRul

    @Override
    protected DetectionRule doParseInstance(XContentParser parser) {
        return DetectionRule.PARSER.apply(parser, null).build();
        return DetectionRule.CONFIG_PARSER.apply(parser, null).build();
    }
}
@ -199,7 +199,7 @@ public class DetectorTests extends AbstractSerializingTestCase<Detector> {
|
||||
|
||||
@Override
|
||||
protected Detector doParseInstance(XContentParser parser) {
|
||||
return Detector.PARSER.apply(parser, null).build();
|
||||
return Detector.CONFIG_PARSER.apply(parser, null).build();
|
||||
}
|
||||
|
||||
public void testVerifyFieldNames_givenInvalidChars() {
|
||||
|
@ -38,8 +38,7 @@ public class JobBuilderTests extends AbstractSerializingTestCase<Job.Builder> {
|
||||
builder.setAnalysisConfig(AnalysisConfigTests.createRandomized());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
builder.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(),
|
||||
randomNonNegativeLong()));
|
||||
builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
DataDescription.Builder dataDescription = new DataDescription.Builder();
|
||||
@ -82,6 +81,6 @@ public class JobBuilderTests extends AbstractSerializingTestCase<Job.Builder> {
|
||||
|
||||
@Override
|
||||
protected Job.Builder doParseInstance(XContentParser parser) {
|
||||
return Job.PARSER.apply(parser, null);
|
||||
return Job.CONFIG_PARSER.apply(parser, null);
|
||||
}
|
||||
}
|
||||
|
@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@ -34,6 +35,21 @@ import static org.hamcrest.Matchers.nullValue;

public class JobTests extends AbstractSerializingTestCase<Job> {

    private static final String FUTURE_JOB = "{\n" +
            "  \"job_id\": \"farequote\",\n" +
            "  \"create_time\": 1234567890000,\n" +
            "  \"tomorrows_technology_today\": \"wow\",\n" +
            "  \"analysis_config\": {\n" +
            "    \"bucket_span\": \"1h\",\n" +
            "    \"something_new\": \"gasp\",\n" +
            "    \"detectors\": [{\"function\": \"metric\", \"field_name\": \"responsetime\", \"by_field_name\": \"airline\"}]\n" +
            "  },\n" +
            "  \"data_description\": {\n" +
            "    \"time_field\": \"time\",\n" +
            "    \"the_future\": 123\n" +
            "  }\n" +
            "}";

    @Override
    protected Job createTestInstance() {
        return createRandomizedJob();
@ -46,7 +62,20 @@ public class JobTests extends AbstractSerializingTestCase<Job> {

    @Override
    protected Job doParseInstance(XContentParser parser) {
        return Job.PARSER.apply(parser, null).build();
        return Job.CONFIG_PARSER.apply(parser, null).build();
    }

    public void testFutureConfigParse() throws IOException {
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_JOB);
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> Job.CONFIG_PARSER.apply(parser, null).build());
        assertEquals("[job_details] unknown field [tomorrows_technology_today], parser not found", e.getMessage());
    }

    public void testFutureMetadataParse() throws IOException {
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, FUTURE_JOB);
        // Unlike the config version of this test, the metadata parser should tolerate the unknown future field
        assertNotNull(Job.METADATA_PARSER.apply(parser, null).build());
    }

    public void testConstructor_GivenEmptyJobConfiguration() {
@ -479,7 +508,7 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
            builder.setLastDataTime(new Date(randomNonNegativeLong()));
        }
        builder.setAnalysisConfig(AnalysisConfigTests.createRandomized());
        builder.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(), randomNonNegativeLong()));
        builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized());

        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setFormat(randomFrom(DataDescription.DataFormat.values()));

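The FUTURE_JOB fixture documents the intent behind the CONFIG_PARSER/METADATA_PARSER split: configuration supplied by users is parsed strictly, while metadata written by a (possibly newer) version is parsed leniently. A hypothetical sketch of such a pair using Elasticsearch's `ObjectParser`, whose boolean constructor argument controls whether unknown fields are ignored (names illustrative):

    // Strict: an unknown field such as "tomorrows_technology_today" throws.
    ObjectParser<Job.Builder, Void> config = new ObjectParser<>("job_details", false, Job.Builder::new);
    // Lenient: unknown fields from future versions are skipped.
    ObjectParser<Job.Builder, Void> metadata = new ObjectParser<>("job_details", true, Job.Builder::new);
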
@ -51,7 +51,7 @@ public class JobUpdateTests extends AbstractSerializingTestCase<JobUpdate> {
            update.setModelPlotConfig(new ModelPlotConfig(randomBoolean(), randomAlphaOfLength(10)));
        }
        if (randomBoolean()) {
            update.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(), randomNonNegativeLong()));
            update.setAnalysisLimits(AnalysisLimitsTests.createRandomized());
        }
        if (randomBoolean()) {
            update.setRenormalizationWindowDays(randomNonNegativeLong());

@ -31,6 +31,6 @@ public class ModelPlotConfigTests extends AbstractSerializingTestCase<ModelPlotC

    @Override
    protected ModelPlotConfig doParseInstance(XContentParser parser) {
        return ModelPlotConfig.PARSER.apply(parser, null);
        return ModelPlotConfig.CONFIG_PARSER.apply(parser, null);
    }
}

@ -47,7 +47,7 @@ public class RuleConditionTests extends AbstractSerializingTestCase<RuleConditio

    @Override
    protected RuleCondition doParseInstance(XContentParser parser) {
        return RuleCondition.PARSER.apply(parser, null);
        return RuleCondition.CONFIG_PARSER.apply(parser, null);
    }

    public void testConstructor() {

@ -39,15 +39,6 @@ public class AnalysisLimitsWriterTests extends ESTestCase {
        verify(writer).write("[memory]\n[results]\n");
    }

    public void testWrite_GivenModelMemoryLimitIsZero() throws IOException {
        AnalysisLimits limits = new AnalysisLimits(0L, null);
        AnalysisLimitsWriter analysisLimitsWriter = new AnalysisLimitsWriter(limits, writer);

        analysisLimitsWriter.write();

        verify(writer).write("[memory]\n[results]\n");
    }

    public void testWrite_GivenModelMemoryLimitWasSet() throws IOException {
        AnalysisLimits limits = new AnalysisLimits(10L, null);
        AnalysisLimitsWriter analysisLimitsWriter = new AnalysisLimitsWriter(limits, writer);
@ -58,7 +49,7 @@ public class AnalysisLimitsWriterTests extends ESTestCase {
    }

    public void testWrite_GivenCategorizationExamplesLimitWasSet() throws IOException {
        AnalysisLimits limits = new AnalysisLimits(0L, 5L);
        AnalysisLimits limits = new AnalysisLimits(null, 5L);
        AnalysisLimitsWriter analysisLimitsWriter = new AnalysisLimitsWriter(limits, writer);

        analysisLimitsWriter.write();

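The deleted zero-limit test and the `0L` to `null` swap follow from the parser change above: zero is no longer a legal `model_memory_limit`, so "no limit configured" is now expressed as `null`. As used in the hunk:

    // Only the categorization examples limit is set; the memory limit is left unset (null), not 0L.
    AnalysisLimits limits = new AnalysisLimits(null, 5L);
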
@ -349,6 +349,7 @@ public class AuthorizationServiceTests extends ESTestCase {
        verifyNoMoreInteractions(auditTrail);
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1217")
    public void testElasticUserOnlyAuthorizedForChangePasswordRequestsInSetupMode() {
        final User user = new ElasticUser(true, true);
        final ChangePasswordRequest changePasswordrequest = new ChangePasswordRequestBuilder(mock(Client.class))

@ -5,6 +5,10 @@
 */
package org.elasticsearch.xpack.security.transport;

import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
@ -17,7 +21,14 @@ import org.elasticsearch.node.NodeValidationException;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.test.discovery.TestZenDiscovery;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ConnectionProfile;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import org.elasticsearch.xpack.security.Security;
@ -29,10 +40,13 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;

import static java.util.Collections.singletonMap;
import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore;
import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;

public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase {
@ -65,7 +79,6 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase
        settingsBuilder.put(super.nodeSettings(nodeOrdinal))
                .put("transport.profiles.client.xpack.security.ssl.truststore.path", store) // settings for client truststore
                .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED)
                .put("transport.profiles.default.type", "node")
                .put("transport.profiles.client.xpack.security.type", "client")
                .put("transport.profiles.client.port", randomClientPortRange)
                // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent
@ -73,6 +86,9 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase
                .put("xpack.security.audit.enabled", false)
                .put(XPackSettings.WATCHER_ENABLED.getKey(), false)
                .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false);
        if (randomBoolean()) {
            settingsBuilder.put("transport.profiles.default.xpack.security.type", "node"); // this is default lets set it randomly
        }

        SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings ->
                secureSettings.setString("transport.profiles.client.xpack.security.ssl.truststore.secure_password", "testnode"));
@ -111,7 +127,7 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase
        }
    }

    public void testThatConnectionToClientTypeConnectionIsRejected() throws IOException, NodeValidationException {
    public void testThatConnectionToClientTypeConnectionIsRejected() throws IOException, NodeValidationException, InterruptedException {
        Path home = createTempDir();
        Path xpackConf = home.resolve("config").resolve(XPackPlugin.NAME);
        Files.createDirectories(xpackConf);
@ -144,23 +160,49 @@ public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase
        addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode");
        try (Node node = new MockNode(nodeSettings.build(), Arrays.asList(XPackPlugin.class, TestZenDiscovery.TestPlugin.class))) {
            node.start();
            TransportService instance = node.injector().getInstance(TransportService.class);
            try (Transport.Connection connection = instance.openConnection(new DiscoveryNode("theNode", transportAddress, Version.CURRENT),
                    ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null))) {
                // handshake should be ok
                final DiscoveryNode handshake = instance.handshake(connection, 10000);
                assertEquals(transport.boundAddress().publishAddress(), handshake.getAddress());
                CountDownLatch latch = new CountDownLatch(1);
                instance.sendRequest(connection, NodeMappingRefreshAction.ACTION_NAME,
                        new NodeMappingRefreshAction.NodeMappingRefreshRequest("foo", "bar", "baz"),
                        TransportRequestOptions.EMPTY,
                        new TransportResponseHandler<TransportResponse>() {
                            @Override
                            public TransportResponse newInstance() {
                                fail("never get that far");
                                return null;
                            }

            // assert that node is not connected by waiting for the timeout
            try {
                // updating cluster settings requires a master. since the node should not be able to
                // connect to the cluster, there should be no master, and therefore this
                // operation should fail. we can't use cluster health/stats here to and
                // wait for a timeout, because as long as the node is not connected to the cluster
                // the license is disabled and therefore blocking health & stats calls.
                node.client().admin().cluster().prepareUpdateSettings()
                        .setTransientSettings(singletonMap("logger.org.elasticsearch.xpack.security", "DEBUG"))
                        .setMasterNodeTimeout(TimeValue.timeValueMillis(100))
                        .get();
                fail("Expected to fail update settings as the node should not be able to connect to the cluster, cause there should be " +
                        "no master");
            } catch (MasterNotDiscoveredException e) {
                // expected
                logger.error("expected exception", e);
                            @Override
                            public void handleResponse(TransportResponse response) {
                                try {
                                    fail("never get that far");
                                } finally {
                                    latch.countDown();
                                }
                            }

                            @Override
                            public void handleException(TransportException exp) {
                                try {
                                    assertThat(exp.getCause(), instanceOf(ElasticsearchSecurityException.class));
                                    assertThat(exp.getCause().getMessage(),
                                            equalTo("executing internal/shard actions is considered malicious and forbidden"));
                                } finally {
                                    latch.countDown();
                                }
                            }

                            @Override
                            public String executor() {
                                return ThreadPool.Names.SAME;
                            }
                        });
                latch.await();
            }
        }
    }

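The rewritten test asserts the rejection asynchronously through a transport response handler instead of waiting for a master-discovery timeout. The underlying synchronisation is a plain `CountDownLatch`; a generic sketch of the pattern (the async entry point here is hypothetical):

    CountDownLatch latch = new CountDownLatch(1);
    sendAsync(request, failure -> {      // hypothetical async call taking a callback
        try {
            // assertions on the failure go here
        } finally {
            latch.countDown();           // always release the waiting test thread
        }
    });
    latch.await();                       // block until the callback has run
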
@ -18,7 +18,6 @@ import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.Network;
import org.elasticsearch.transport.TcpTransport;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.security.audit.AuditTrailService;
@ -62,7 +61,8 @@ public class IPFilterTests extends ESTestCase {
                IPFilter.IP_FILTER_ENABLED_SETTING,
                IPFilter.TRANSPORT_FILTER_ALLOW_SETTING,
                IPFilter.TRANSPORT_FILTER_DENY_SETTING,
                TcpTransport.TRANSPORT_PROFILES_SETTING)));
                IPFilter.PROFILE_FILTER_ALLOW_SETTING,
                IPFilter.PROFILE_FILTER_DENY_SETTING)));

        httpTransport = mock(HttpServerTransport.class);
        TransportAddress httpAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 9200);
@ -145,6 +145,27 @@ public class IPFilterTests extends ESTestCase {
        assertAddressIsDeniedForProfile("client", "192.168.0.2");
    }

    public void testThatProfilesAreUpdateable() throws Exception {
        Settings settings = Settings.builder()
                .put("xpack.security.transport.filter.allow", "localhost")
                .put("xpack.security.transport.filter.deny", "_all")
                .put("transport.profiles.client.xpack.security.filter.allow", "192.168.0.1")
                .put("transport.profiles.client.xpack.security.filter.deny", "_all")
                .build();
        ipFilter = new IPFilter(settings, auditTrail, clusterSettings, licenseState);
        ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses());
        Settings newSettings = Settings.builder().putArray("transport.profiles.client.xpack.security.filter.allow", "192.168.0.1",
                "192.168.0.2")
                .put("transport.profiles.client.xpack.security.filter.deny", "192.168.0.3").build();
        Settings.Builder updatedSettingsBuilder = Settings.builder();
        clusterSettings.updateDynamicSettings(newSettings, updatedSettingsBuilder, Settings.builder(), "test");
        clusterSettings.applySettings(updatedSettingsBuilder.build());
        assertAddressIsAllowed("127.0.0.1");
        assertAddressIsDenied("192.168.0.1");
        assertAddressIsAllowedForProfile("client", "192.168.0.1", "192.168.0.2");
        assertAddressIsDeniedForProfile("client", "192.168.0.3");
    }

    public void testThatAllowWinsOverDeny() throws Exception {
        Settings settings = Settings.builder()
                .put("xpack.security.transport.filter.allow", "10.0.0.1")

@ -17,6 +17,7 @@ import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.TcpTransport;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.audit.AuditTrailService;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.junit.Before;
@ -54,7 +55,8 @@ public class IpFilterRemoteAddressFilterTests extends ESTestCase {
                IPFilter.IP_FILTER_ENABLED_SETTING,
                IPFilter.TRANSPORT_FILTER_ALLOW_SETTING,
                IPFilter.TRANSPORT_FILTER_DENY_SETTING,
                TcpTransport.TRANSPORT_PROFILES_SETTING)));
                IPFilter.PROFILE_FILTER_ALLOW_SETTING,
                IPFilter.PROFILE_FILTER_DENY_SETTING)));
        XPackLicenseState licenseState = mock(XPackLicenseState.class);
        when(licenseState.isIpFilteringAllowed()).thenReturn(true);
        AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState);

@ -73,7 +73,7 @@ public class SecurityNetty4TransportTests extends ESTestCase {
    public void testThatProfileTakesDefaultSSLSetting() throws Exception {
        SecurityNetty4Transport transport = createTransport();
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY);
        ChannelHandler handler = transport.getServerChannelInitializer("default");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine(), notNullValue());
    }
@ -81,7 +81,7 @@ public class SecurityNetty4TransportTests extends ESTestCase {
    public void testDefaultClientAuth() throws Exception {
        SecurityNetty4Transport transport = createTransport();
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY);
        ChannelHandler handler = transport.getServerChannelInitializer("default");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
@ -96,7 +96,7 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY);
        ChannelHandler handler = transport.getServerChannelInitializer("default");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
@ -111,7 +111,7 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY);
        ChannelHandler handler = transport.getServerChannelInitializer("default");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
@ -126,7 +126,7 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY);
        ChannelHandler handler = transport.getServerChannelInitializer("default");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(true));
@ -136,13 +136,13 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        String value = randomFrom(SSLClientAuth.REQUIRED.name(), SSLClientAuth.REQUIRED.name().toLowerCase(Locale.ROOT));
        Settings settings = Settings.builder()
                .put(env.settings())
                .put("transport.profiles.client.port", "8000-9000")
                .put("transport.profiles.client.xpack.security.ssl.client_authentication", value)
                .build();
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("client",
                Settings.builder().put("xpack.security.ssl.client_authentication", value).build());
        ChannelHandler handler = transport.getServerChannelInitializer("client");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(true));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
@ -152,13 +152,13 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        String value = randomFrom(SSLClientAuth.NONE.name(), SSLClientAuth.NONE.name().toLowerCase(Locale.ROOT));
        Settings settings = Settings.builder()
                .put(env.settings())
                .put("transport.profiles.client.port", "8000-9000")
                .put("transport.profiles.client.xpack.security.ssl.client_authentication", value)
                .build();
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        ChannelHandler handler = transport.getServerChannelInitializer("client",
                Settings.builder().put("xpack.security.ssl.client_authentication", value).build());
        ChannelHandler handler = transport.getServerChannelInitializer("client");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(false));
@ -168,13 +168,13 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        String value = randomFrom(SSLClientAuth.OPTIONAL.name(), SSLClientAuth.OPTIONAL.name().toLowerCase(Locale.ROOT));
        Settings settings = Settings.builder()
                .put(env.settings())
                .put("transport.profiles.client.port", "8000-9000")
                .put("transport.profiles.client.xpack.security.ssl.client_authentication", value)
                .build();
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        final ChannelHandler handler = transport.getServerChannelInitializer("client",
                Settings.builder().put("xpack.security.ssl.client_authentication", value).build());
        final ChannelHandler handler = transport.getServerChannelInitializer("client");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        assertThat(ch.pipeline().get(SslHandler.class).engine().getNeedClientAuth(), is(false));
        assertThat(ch.pipeline().get(SslHandler.class).engine().getWantClientAuth(), is(true));
@ -197,7 +197,7 @@ public class SecurityNetty4TransportTests extends ESTestCase {
        sslService = new SSLService(settings, env);
        SecurityNetty4Transport transport = createTransport(settings);
        Netty4MockUtil.setOpenChannelsHandlerToMock(transport);
        final ChannelHandler handler = transport.getServerChannelInitializer("default", Settings.EMPTY);
        final ChannelHandler handler = transport.getServerChannelInitializer("default");
        final EmbeddedChannel ch = new EmbeddedChannel(handler);
        final SSLEngine engine = ch.pipeline().get(SslHandler.class).engine();
        assertFalse(engine.getNeedClientAuth());

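Each hunk above makes the same mechanical change: the per-profile `Settings` argument is dropped, and the initializer resolves a profile's SSL configuration from the node settings the transport was constructed with. Schematically:

    // before: ChannelHandler handler = transport.getServerChannelInitializer("client", profileSettings);
    // after:  the profile's SSL settings are looked up internally
    ChannelHandler handler = transport.getServerChannelInitializer("client");
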
@ -70,7 +70,7 @@ public class WatcherPluginDisableTests extends ESIntegTestCase {

    public void testRestEndpoints() throws Exception {
        try {
            getRestClient().performRequest("GET", "/_xpack/watcher");
            getRestClient().performRequest("GET", "/_xpack/watcher/watch/my-watch");
            fail("request should have failed");
        } catch(ResponseException e) {
            assertThat(e.getResponse().getStatusLine().getStatusCode(), is(HttpStatus.SC_BAD_REQUEST));

@ -1,5 +1,5 @@
{
  "xpack.upgrade.info": {
  "xpack.migration.get_assistance": {
    "methods": [ "GET" ],
    "url": {
      "path": "/_xpack/migration/assistance",

@ -1,5 +1,5 @@
{
  "xpack.upgrade": {
  "xpack.migration.upgrade": {
    "methods": [ "POST" ],
    "url": {
      "path": "/_xpack/migration/upgrade/{index}",

@ -1,6 +1,6 @@
{
  "xpack.security.authenticate": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-authenticate.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-authenticate.html",
    "methods": [ "GET" ],
    "url": {
      "path": "/_xpack/security/_authenticate",

@ -1,6 +1,6 @@
{
  "xpack.security.change_password": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-change-password.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html",
    "methods": [ "PUT", "POST" ],
    "url": {
      "path": "/_xpack/security/user/{username}/_password",

@ -1,6 +1,6 @@
{
  "xpack.security.clear_cached_realms": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/current/security-api-clear-cache.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-cache.html",
    "methods": [ "POST" ],
    "url": {
      "path": "/_xpack/security/realm/{realms}/_clear_cache",

@ -1,6 +1,6 @@
{
  "xpack.security.clear_cached_roles": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-clear-role-cache",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-clear-role-cache",
    "methods": [ "POST" ],
    "url": {
      "path": "/_xpack/security/role/{name}/_clear_cache",

@ -1,6 +1,6 @@
{
  "xpack.security.delete_role": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-delete-role",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-delete-role",
    "methods": [ "DELETE" ],
    "url": {
      "path": "/_xpack/security/role/{name}",

@ -1,6 +1,6 @@
{
  "xpack.security.delete_role_mapping": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-role-mapping.html#security-api-delete-role-mapping",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-role-mapping.html#security-api-delete-role-mapping",
    "methods": [ "DELETE" ],
    "url": {
      "path": "/_xpack/security/role_mapping/{name}",

@ -1,6 +1,6 @@
{
  "xpack.security.delete_user": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-users.html#security-api-delete-user",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-delete-user",
    "methods": [ "DELETE" ],
    "url": {
      "path": "/_xpack/security/user/{username}",

@ -1,6 +1,6 @@
{
  "xpack.security.disable_user": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-disable-user.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-disable-user",
    "methods": [ "PUT", "POST" ],
    "url": {
      "path": "/_xpack/security/user/{username}/_disable",

@ -1,6 +1,6 @@
{
  "xpack.security.enable_user": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-enable-user.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-enable-user",
    "methods": [ "PUT", "POST" ],
    "url": {
      "path": "/_xpack/security/user/{username}/_enable",

@ -1,6 +1,6 @@
{
  "xpack.security.get_role": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-get-role",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-get-role",
    "methods": [ "GET" ],
    "url": {
      "path": "/_xpack/security/role/{name}",

@ -1,6 +1,6 @@
{
  "xpack.security.get_role_mapping": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-role-mapping.html#security-api-get-role-mapping",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-role-mapping.html#security-api-get-role-mapping",
    "methods": [ "GET" ],
    "url": {
      "path": "/_xpack/security/role_mapping/{name}",

@ -1,6 +1,6 @@
{
  "xpack.security.get_token": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-tokens.html#security-api-get-token",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-tokens.html#security-api-get-token",
    "methods": [ "POST" ],
    "url": {
      "path": "/_xpack/security/oauth2/token",

@ -1,6 +1,6 @@
{
  "xpack.security.get_user": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-users.html#security-api-get-user",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-get-user",
    "methods": [ "GET" ],
    "url": {
      "path": "/_xpack/security/user/{username}",

@ -1,6 +1,6 @@
{
  "xpack.security.invalidate_token": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-tokens.html#security-api-invalidate-token",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-tokens.html#security-api-invalidate-token",
    "methods": [ "DELETE" ],
    "url": {
      "path": "/_xpack/security/oauth2/token",

@ -1,6 +1,6 @@
{
  "xpack.security.put_role": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-roles.html#security-api-put-role",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-roles.html#security-api-put-role",
    "methods": [ "PUT", "POST" ],
    "url": {
      "path": "/_xpack/security/role/{name}",

@ -1,6 +1,6 @@
{
  "xpack.security.put_role_mapping": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-role-mapping.html#security-api-put-role-mapping",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-role-mapping.html#security-api-put-role-mapping",
    "methods": [ "PUT", "POST" ],
    "url": {
      "path": "/_xpack/security/role_mapping/{name}",

@ -1,6 +1,6 @@
{
  "xpack.security.put_user": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/master/security-api-users.html#security-api-put-user",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html#security-api-put-user",
    "methods": [ "PUT", "POST" ],
    "url": {
      "path": "/_xpack/security/user/{username}",

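The first two spec hunks move the upgrade API into the migration namespace (`xpack.upgrade.info` becomes `xpack.migration.get_assistance`, `xpack.upgrade` becomes `xpack.migration.upgrade`). A hypothetical call against the renamed assistance endpoint with the low-level Java REST client (client construction omitted):

    // GET the migration assistance report declared by the renamed spec.
    Response response = restClient.performRequest("GET", "/_xpack/migration/assistance");
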
@ -70,6 +70,46 @@
        name: ".ml-anomalies-.write-job-crud-test-apis"
  - is_false: ''

---
"Test put job with model_memory_limit as number":

  - do:
      xpack.ml.put_job:
        job_id: job-model-memory-limit-as-number
        body: >
          {
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {
            },
            "analysis_limits": {
              "model_memory_limit": 2048
            }
          }
  - match: { job_id: "job-model-memory-limit-as-number" }
  - match: { analysis_limits.model_memory_limit: "2048mb" }

---
"Test put job with model_memory_limit as string":

  - do:
      xpack.ml.put_job:
        job_id: job-model-memory-limit-as-string
        body: >
          {
            "analysis_config" : {
              "detectors" :[{"function":"count"}]
            },
            "data_description" : {
            },
            "analysis_limits": {
              "model_memory_limit": "3g"
            }
          }
  - match: { job_id: "job-model-memory-limit-as-string" }
  - match: { analysis_limits.model_memory_limit: "3072mb" }

---
"Test get job API with non existing job id":
  - do:
@ -262,7 +302,7 @@
  - match: { description: "Post update description" }
  - match: { model_plot_config.enabled: false }
  - match: { model_plot_config.terms: "foobar" }
  - match: { analysis_limits.model_memory_limit: 20 }
  - match: { analysis_limits.model_memory_limit: "20mb" }
  - match: { analysis_config.categorization_filters: ["cat3.*"] }
  - match: { analysis_config.detectors.0.detector_rules.0.target_field_name: "airline" }
  - match: { analysis_config.detectors.0.detector_index: 0 }

@ -37,14 +37,14 @@ setup:
---
"Upgrade info - all":
  - do:
      xpack.upgrade.info: { index: _all }
      xpack.migration.get_assistance: { index: _all }

  - length: { indices: 0 }

---
"Upgrade info - all, but treat test2 as kibana":
  - do:
      xpack.upgrade.info: { index: _all, kibana_indices: test2 }
      xpack.migration.get_assistance: { index: _all, kibana_indices: test2 }

  - length: { indices: 1 }
  - match: { indices.test2.action_required: "upgrade" }
@ -53,7 +53,7 @@ setup:
"Upgrade test2 as kibana index":

  - do:
      xpack.upgrade: { index: test2, kibana_indices: test2 }
      xpack.migration.upgrade: { index: test2, kibana_indices: test2 }

  - match: { total: 1 }
  - match: { created: 1 }

@ -4,12 +4,38 @@
      cluster.health:
        wait_for_status: yellow

  # ensure index exists, but ignore if it does already
  # this test should ensure the watch is missing, while the index is there
  # ensure index exists by creating a different watch
  - do:
      indices.create:
        index: .watches
        ignore: 400
      xpack.watcher.put_watch:
        id: "other"
        body: >
          {
            "trigger": {
              "schedule": {
                "hourly": {
                  "minute": [ 0, 5 ]
                }
              }
            },
            "input": {
              "simple": {
                "payload": {
                  "send": "yes"
                }
              }
            },
            "condition": {
              "always": {}
            },
            "actions": {
              "test_index": {
                "index": {
                  "index": "test",
                  "doc_type": "test2"
                }
              }
            }
          }

  - do:
      catch: missing

@ -14,99 +14,13 @@ dependencies {
    testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts')
}

Closure changePasswordAndWaitWithAuth = { NodeInfo node, AntBuilder ant ->
    File tmpFile = new File(node.cwd, 'wait.success')

    String password
    if (Version.fromString(node.nodeVersion).onOrAfter('6.0.0')) {
        password = ""
    } else {
        password = "changeme"
    }

    for (int i = 0; i < 10; i++) {
        HttpURLConnection httpURLConnection = null;
        try {
            httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_xpack/security/user/elastic/_password")
                    .openConnection();
            httpURLConnection.setRequestProperty("Authorization", "Basic " +
                    Base64.getEncoder().encodeToString("elastic:${password}".getBytes(StandardCharsets.UTF_8)));
            httpURLConnection.setRequestMethod("PUT");
            httpURLConnection.setDoOutput(true);
            httpURLConnection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");

            httpURLConnection.connect();
            OutputStream out = httpURLConnection.getOutputStream();
            out.write("{\"password\": \"x-pack-test-password\"}".getBytes(StandardCharsets.UTF_8));
            out.close()

            if (httpURLConnection.getResponseCode() == 200) {
                break
            }

        } catch (Exception e) {
            httpURLConnection.disconnect()
            if (i == 9) {
                logger.error("final attempt to set password", e)
            } else {
                logger.debug("failed to set elastic password", e)
            }
        } finally {
            if (httpURLConnection != null) {
                httpURLConnection.disconnect();
            }
        }

        // did not start, so wait a bit before trying again
        Thread.sleep(500L);
    }

    // wait up to twenty seconds
    final long stopTime = System.currentTimeMillis() + 20000L;
    Exception lastException = null;

    while (System.currentTimeMillis() < stopTime) {
        lastException = null;
        // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
        HttpURLConnection httpURLConnection = null;
        try {
            httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${node.config.numNodes}&wait_for_status=yellow").openConnection();
            httpURLConnection.setRequestProperty("Authorization", "Basic " +
                    Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
            httpURLConnection.setRequestMethod("GET");
            httpURLConnection.setConnectTimeout(1000);
            httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes!
            httpURLConnection.connect();
            if (httpURLConnection.getResponseCode() == 200) {
                tmpFile.withWriter StandardCharsets.UTF_8.name(), {
                    it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name()))
                }
                break;
            }
        } catch (Exception e) {
            logger.debug("failed to call cluster health", e)
            lastException = e
        } finally {
            if (httpURLConnection != null) {
                httpURLConnection.disconnect();
            }
        }

        // did not start, so wait a bit before trying again
        Thread.sleep(500L);
    }
    if (tmpFile.exists() == false && lastException != null) {
        logger.error("final attempt of calling cluster health failed", lastException)
    }
    return tmpFile.exists()
}


Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
    File tmpFile = new File(node.cwd, 'wait.success')

    // wait up to twenty seconds
    final long stopTime = System.currentTimeMillis() + 20000L;
    Exception lastException = null;

    while (System.currentTimeMillis() < stopTime) {
        lastException = null;
        // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
@ -114,7 +28,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
        try {
            httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${node.config.numNodes}&wait_for_status=yellow").openConnection();
            httpURLConnection.setRequestProperty("Authorization", "Basic " +
                    Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
                    Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8)));
            httpURLConnection.setRequestMethod("GET");
            httpURLConnection.setConnectTimeout(1000);
            httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes!
@ -202,7 +116,7 @@ subprojects {
        numBwcNodes = 2
        numNodes = 2
        clusterName = 'full-cluster-restart'
        waitCondition = changePasswordAndWaitWithAuth
        waitCondition = waitWithAuth
        setting 'xpack.security.transport.ssl.enabled', 'true'
        setting 'xpack.ssl.keystore.path', 'testnode.jks'
        setting 'xpack.ssl.keystore.password', 'testnode'
@ -277,8 +191,7 @@ subprojects {
        }
    }

    // NORELEASE : this test must be unmuted once https://github.com/elastic/dev/issues/741 is completed
    // check.dependsOn(integTest)

    check.dependsOn(integTest)

    dependencies {
        testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime')

@ -58,7 +58,7 @@ public class FullClusterRestartIT extends ESRestTestCase {

    @Override
    protected Settings restClientSettings() {
        String token = "Basic " + Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
        String token = "Basic " + Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8));
        return Settings.builder()
                .put(ThreadContext.PREFIX + ".Authorization", token)
                // we increase the timeout here to 90 seconds to handle long waits for a green

@ -22,6 +22,8 @@ import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.common.xcontent.XContentType.JSON;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public class MlBasicMultiNodeIT extends ESRestTestCase {

@ -77,7 +79,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {

        response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush");
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals(Collections.singletonMap("flushed", true), responseEntityToMap(response));
        assertFlushResponse(response, true, 1403481600000L);

        response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close",
                Collections.singletonMap("timeout", "20s"));
@ -204,7 +206,7 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {

        response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_flush");
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertEquals(Collections.singletonMap("flushed", true), responseEntityToMap(response));
        assertFlushResponse(response, true, 1403481600000L);

        response = client().performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_close",
                Collections.singletonMap("timeout", "20s"));
@ -315,4 +317,11 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {
        return XContentHelper.convertToMap(JSON.xContent(), response.getEntity().getContent(), false);
    }

    private static void assertFlushResponse(Response response, boolean expectedFlushed, long expectedLastFinalizedBucketEnd)
            throws IOException {
        Map<String, Object> asMap = responseEntityToMap(response);
        assertThat(asMap.size(), equalTo(2));
        // use the parameter rather than a hard-coded true, so the helper honours its signature
        assertThat(asMap.get("flushed"), is(expectedFlushed));
        assertThat(asMap.get("last_finalized_bucket_end"), equalTo(expectedLastFinalizedBucketEnd));
    }
}

@ -23,6 +23,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
    Exception lastException = null;

    while (System.currentTimeMillis() < stopTime) {

        lastException = null;
        // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
        HttpURLConnection httpURLConnection = null;
@ -30,96 +31,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
            // TODO this sucks having to hardcode number of nodes, but node.config.numNodes isn't necessarily accurate for rolling
            httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=2&wait_for_status=yellow").openConnection();
            httpURLConnection.setRequestProperty("Authorization", "Basic " +
                    Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
            httpURLConnection.setRequestMethod("GET");
            httpURLConnection.setConnectTimeout(1000);
            httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes!
            httpURLConnection.connect();
            if (httpURLConnection.getResponseCode() == 200) {
                tmpFile.withWriter StandardCharsets.UTF_8.name(), {
                    it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name()))
                }
                break;
            }
        } catch (Exception e) {
            logger.debug("failed to call cluster health", e)
            lastException = e
        } finally {
            if (httpURLConnection != null) {
                httpURLConnection.disconnect();
            }
        }

        // did not start, so wait a bit before trying again
        Thread.sleep(500L);
    }
    if (tmpFile.exists() == false && lastException != null) {
        logger.error("final attempt of calling cluster health failed", lastException)
    }
    return tmpFile.exists()
}


Closure changePasswordAndWaitWithAuth = { NodeInfo node, AntBuilder ant ->
    File tmpFile = new File(node.cwd, 'wait.success')

    String password
    if (Version.fromString(node.nodeVersion).onOrAfter('6.0.0')) {
        password = ""
    } else {
        password = "changeme"
    }

    for (int i = 0; i < 10; i++) {
        HttpURLConnection httpURLConnection = null;
        try {
            httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_xpack/security/user/elastic/_password")
                    .openConnection();
            httpURLConnection.setRequestProperty("Authorization", "Basic " +
                    Base64.getEncoder().encodeToString("elastic:${password}".getBytes(StandardCharsets.UTF_8)));
            httpURLConnection.setRequestMethod("PUT");
            httpURLConnection.setDoOutput(true);
            httpURLConnection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");

            httpURLConnection.connect();
            OutputStream out = httpURLConnection.getOutputStream();
            out.write("{\"password\": \"x-pack-test-password\"}".getBytes(StandardCharsets.UTF_8));
            out.close()

            if (httpURLConnection.getResponseCode() == 200) {
                break
            }

        } catch (Exception e) {
            httpURLConnection.disconnect()
            if (i == 9) {
                logger.error("final attempt to set password", e)
            } else {
                logger.debug("failed to set elastic password", e)
            }
        } finally {
            if (httpURLConnection != null) {
                httpURLConnection.disconnect();
            }
        }

        // did not start, so wait a bit before trying again
        Thread.sleep(500L);
    }

    // wait up to twenty seconds
    final long stopTime = System.currentTimeMillis() + 20000L;
    Exception lastException = null;

    while (System.currentTimeMillis() < stopTime) {
        lastException = null;
        // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
        HttpURLConnection httpURLConnection = null;
        try {
            // TODO this sucks having to hardcode number of nodes, but node.config.numNodes isn't necessarily accurate for rolling
            httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=2&wait_for_status=yellow").openConnection();
            httpURLConnection.setRequestProperty("Authorization", "Basic " +
                    Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8)));
                    Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8)));
            httpURLConnection.setRequestMethod("GET");
            httpURLConnection.setConnectTimeout(1000);
            httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes!
@ -207,7 +119,7 @@ subprojects {
        numBwcNodes = 2
        numNodes = 2
        clusterName = 'rolling-upgrade'
        waitCondition = changePasswordAndWaitWithAuth
        waitCondition = waitWithAuth
        setting 'xpack.security.transport.ssl.enabled', 'true'
        setting 'xpack.ssl.keystore.path', 'testnode.jks'
        setting 'xpack.ssl.keystore.password', 'testnode'
@ -316,8 +228,8 @@ subprojects {
        dependsOn = ["v${wireCompatVersions[-1]}#bwcTest"]
    }
}
// NORELEASE : this test must be unmuted once https://github.com/elastic/dev/issues/741 is completed
// check.dependsOn(integTest)

check.dependsOn(integTest)

dependencies {
    testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime')

@ -56,7 +56,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends SecurityClusterClientYa

    @Override
    protected Settings restClientSettings() {
        String token = "Basic " + Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
        String token = "Basic " + Base64.getEncoder().encodeToString(("elastic:changeme").getBytes(StandardCharsets.UTF_8));
        return Settings.builder()
                .put(ThreadContext.PREFIX + ".Authorization", token)
                // we increase the timeout here to 90 seconds to handle long waits for a green

@ -112,7 +112,7 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase {
    @Override
    protected Settings restClientSettings() {
        String token = "Basic " + Base64.getEncoder()
                .encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8));
                .encodeToString(("elastic:changeme").getBytes(StandardCharsets.UTF_8));
        return Settings.builder()
                .put(ThreadContext.PREFIX + ".Authorization", token)
                .build();