diff --git a/dev-tools/create_bwc_indexes.py b/dev-tools/create_bwc_indexes.py index c5662ae013a..251da452202 100644 --- a/dev-tools/create_bwc_indexes.py +++ b/dev-tools/create_bwc_indexes.py @@ -340,16 +340,13 @@ def wait_for_monitoring_index_to_fill(client, version): def wait_for_monitoring_to_index(doc_type, count): logging.info('Waiting for %s to have count(%s) = %s...' % (monitoring_name, doc_type, count)) wait_for_search(count, lambda: - client.search(index=monitoring_name, doc_type=doc_type, body={"query": {"match_all": {}}})) + client.search(index=monitoring_name, body={"query": {"term": { "type": doc_type }}})) - wait_for_monitoring_to_index('cluster_info', 1) - if parse_version(version) >= parse_version('2.1.0'): - wait_for_monitoring_to_index('node', 1) wait_for_monitoring_to_index('index_stats', 10) wait_for_monitoring_to_index('shards', 10) wait_for_monitoring_to_index('indices_stats', 3) wait_for_monitoring_to_index('node_stats', 3) - wait_for_monitoring_to_index('cluster_state', 3) + wait_for_monitoring_to_index('cluster_stats', 3) wait_for_yellow(version, client, monitoring_name) diff --git a/docs/en/ml/getting-started.asciidoc b/docs/en/ml/getting-started.asciidoc index fe1ce4374bc..9d9660dd8d4 100644 --- a/docs/en/ml/getting-started.asciidoc +++ b/docs/en/ml/getting-started.asciidoc @@ -483,12 +483,13 @@ The number of records that have been processed by the job. Memory status:: The status of the mathematical models. When you create jobs by using the APIs or by using the advanced options in {kib}, you can specify a `model_memory_limit`. -That value is the maximum amount of memory resources, in MiB, that the -mathematical models can use. Once that limit is approached, data pruning becomes -more aggressive. Upon exceeding that limit, new entities are not modeled. -The default value is `4096`. The memory status field reflects whether you have -reached or exceeded the model memory limit. It can have one of the following -values: + +That value is the maximum amount of memory resources that the mathematical +models can use. Once that limit is approached, data pruning becomes more +aggressive. Upon exceeding that limit, new entities are not modeled. For more +information about this setting, see +{ref}/ml-job-resource.html#ml-apilimits[Analysis Limits]. The memory status +field reflects whether you have reached or exceeded the model memory limit. It +can have one of the following values: + `ok`::: The models stayed below the configured value. `soft_limit`::: The models used more than 60% of the configured memory limit and older unused models will be pruned to free up space. diff --git a/docs/en/ml/limitations.asciidoc b/docs/en/ml/limitations.asciidoc index 12a4e52dfa3..d94a03a31cf 100644 --- a/docs/en/ml/limitations.asciidoc +++ b/docs/en/ml/limitations.asciidoc @@ -23,14 +23,21 @@ you work with extensively in the beta, make note of all the details so that you can re-create them successfully. [float] -=== {xpackml} features do not support cross cluster search +=== {xpackml} features do not yet support cross cluster search -You cannot use cross cluster search in either the {ml} APIs or the {ml} -features in {kib}. +At this time, you cannot use cross cluster search in either the {ml} APIs or the +{ml} features in {kib}. For more information about cross cluster search, see {ref}/modules-cross-cluster-search.html[Cross Cluster Search]. +[float] +=== {xpackml} features are not supported on tribe nodes + +You cannot use {ml} features on tribe nodes. 
For more information about that +type of node, see +{ref}/modules-tribe.html[Tribe node]. + [float] === Anomaly Explorer omissions and limitations //See x-pack-elasticsearch/#844 and x-pack-kibana/#1461 diff --git a/docs/en/rest-api/ml/jobresource.asciidoc b/docs/en/rest-api/ml/jobresource.asciidoc index 17e05314ee3..fbbd838dd07 100644 --- a/docs/en/rest-api/ml/jobresource.asciidoc +++ b/docs/en/rest-api/ml/jobresource.asciidoc @@ -277,26 +277,33 @@ For more information, see //<>. `model_memory_limit`:: - (long) The approximate maximum amount of memory resources that are required - for analytical processing, in MiB. Once this limit is approached, data pruning + (long or string) The approximate maximum amount of memory resources that are + required for analytical processing. Once this limit is approached, data pruning becomes more aggressive. Upon exceeding this limit, new entities are not - modeled. The default value is 4096. + modeled. The default value is `4096mb`. If you specify a number instead of a + string, the units are assumed to be MiB. Specifying a string is recommended + for clarity. If you specify a byte size unit of `b` or `kb` and the number + does not equate to a discrete number of megabytes, it is rounded down to the + closest MiB. The minimum valid value is 1 MiB. If you specify a value less + than 1 MiB, an error occurs. For more information about supported byte size + units, see + {ref}/common-options.html#byte-units[Byte size units]. [float] [[ml-apimodelplotconfig]] ==== Model Plot Config This advanced configuration option stores model information along with the -results. It provides a more detailed view into anomaly detection. +results. It provides a more detailed view into anomaly detection. -WARNING: If you enable model plot it can add considerable overhead to the performance +WARNING: If you enable model plot it can add considerable overhead to the performance of the system; it is not feasible for jobs with many entities. Model plot provides a simplified and indicative view of the model and its bounds. It does not display complex features such as multivariate correlations or multimodal data. As such, anomalies may occasionally be reported which cannot be seen in the model plot. -Model plot config can be configured when the job is created or updated later. It must be +Model plot config can be configured when the job is created or updated later. It must be disabled if performance issues are experienced. The `model_plot_config` object has the following properties: @@ -306,7 +313,7 @@ The `model_plot_config` object has the following properties: each entity that is being analyzed. By default, this is not enabled. `terms`:: - (string) Limits data collection to this comma separated list of partition or by field values. - If terms are not specified or it is an empty string, no filtering is applied. + (string) Limits data collection to this comma separated list of partition or by field values. + If terms are not specified or it is an empty string, no filtering is applied. For example, "CPU,NetworkIn,DiskWrites". This is experimental. Only the specified `terms` can be viewed when using the Single Metric Viewer. 
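The unit handling described above for `model_memory_limit` can be illustrated with a small standalone sketch. This is plain Java written for this explanation, not the X-Pack implementation; the helper name, the handling of unrecognized units, and the example values are assumptions made for the illustration.

[source,java]
------------------------------------------------------------
import java.util.Locale;

/**
 * Illustration of the documented model_memory_limit rules: a bare number is
 * treated as MiB, a string may carry a byte-size unit, values are rounded
 * down to whole MiB, and anything below 1 MiB is rejected.
 */
public class ModelMemoryLimitExample {

    private static final long BYTES_PER_MB = 1024L * 1024L;

    /** Returns the limit in MiB, applying the documented rounding and minimum. */
    static long parseModelMemoryLimit(Object value) {
        final long bytes;
        if (value instanceof Number) {
            // A plain number is assumed to already be in MiB.
            bytes = ((Number) value).longValue() * BYTES_PER_MB;
        } else {
            String s = value.toString().trim().toLowerCase(Locale.ROOT);
            if (s.endsWith("gb")) {
                bytes = Long.parseLong(s.substring(0, s.length() - 2).trim()) * 1024 * BYTES_PER_MB;
            } else if (s.endsWith("mb")) {
                bytes = Long.parseLong(s.substring(0, s.length() - 2).trim()) * BYTES_PER_MB;
            } else if (s.endsWith("kb")) {
                bytes = Long.parseLong(s.substring(0, s.length() - 2).trim()) * 1024L;
            } else if (s.endsWith("b")) {
                bytes = Long.parseLong(s.substring(0, s.length() - 1).trim());
            } else {
                // No unit on a string value: treat it like a bare number, i.e. MiB.
                bytes = Long.parseLong(s) * BYTES_PER_MB;
            }
        }
        // Byte and kilobyte values that do not add up to a whole MiB round down.
        long mb = bytes / BYTES_PER_MB;
        if (mb < 1) {
            throw new IllegalArgumentException("model_memory_limit must be at least 1mb");
        }
        return mb;
    }

    public static void main(String[] args) {
        System.out.println(parseModelMemoryLimit(4096));        // 4096
        System.out.println(parseModelMemoryLimit("4096mb"));    // 4096
        System.out.println(parseModelMemoryLimit("3145728kb")); // 3072 (3 GiB expressed in KiB)
        System.out.println(parseModelMemoryLimit("2097153b"));  // 2, rounded down from just over 2 MiB
    }
}
------------------------------------------------------------
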
diff --git a/docs/en/rest-api/ml/update-job.asciidoc b/docs/en/rest-api/ml/update-job.asciidoc index a356fbbeeb9..fa59b094b29 100644 --- a/docs/en/rest-api/ml/update-job.asciidoc +++ b/docs/en/rest-api/ml/update-job.asciidoc @@ -23,8 +23,7 @@ The following properties can be updated after the job is created: |Name |Description |Requires Restart |`analysis_limits`: `model_memory_limit` |The approximate maximum amount of -memory resources required for analytical processing, in MiB. -See <>. | Yes +memory resources required for analytical processing. See <>. | Yes |`background_persist_interval` |Advanced configuration option. The time between each periodic persistence of the model. See <>. | Yes diff --git a/docs/en/security/authentication/active-directory-realm.asciidoc b/docs/en/security/authentication/active-directory-realm.asciidoc index 8751c34c91f..6e4de52c365 100644 --- a/docs/en/security/authentication/active-directory-realm.asciidoc +++ b/docs/en/security/authentication/active-directory-realm.asciidoc @@ -33,6 +33,12 @@ NOTE: When you use Active Directory for authentication, the username entered by the user is expected to match the `sAMAccountName` or `userPrincipalName`, not the common name. +The Active Directory realm authenticates users using an LDAP bind request. After +authenticating the user, the realm then searches to find the user's entry in +Active Directory. Once the user has been found, the Active Directory realm then +retrieves the user's group memberships from the `tokenGroups` attribute on the +user's entry in Active Directory. + To configure an `active_directory` realm: . Add a realm configuration of type `active_directory` to `elasticsearch.yml` @@ -63,13 +69,10 @@ xpack: order: 0 <1> domain_name: ad.example.com url: ldaps://ad.example.com:636 <2> - unmapped_groups_as_roles: true <3> ------------------------------------------------------------ <1> The realm order controls the order in which the configured realms are checked when authenticating a user. <2> If you don't specify the URL, it defaults to `ldap::389`. -<3> When this option is enabled, Active Directory groups are automatically mapped - to roles of the same name. + IMPORTANT: When you configure realms in `elasticsearch.yml`, only the realms you specify are used for authentication. If you also want to use the @@ -77,6 +80,42 @@ realms you specify are used for authentication. If you also want to use the . Restart Elasticsearch. +===== Configuring a Bind User +By default, all of the LDAP operations are run by the user that {security} is +authenticating. In some cases, regular users may not be able to access all of the +necessary items within Active Directory and a _bind user_ is needed. A bind user +can be configured and will be used to perform all operations other than the LDAP +bind request, which is required to authenticate the credentials provided by the user. + +The use of a bind user enables the <> to be +used with the Active Directory realm and the ability to maintain a set of pooled +connections to Active Directory. These pooled connection reduce the number of +resources that must be created and destroyed with every user authentication. + +The following example shows the configuration of a bind user through the user of the +`bind_dn` and `bind_password` settings. 
+ +[source, yaml] +------------------------------------------------------------ +xpack: + security: + authc: + realms: + active_directory: + type: active_directory + order: 0 + domain_name: ad.example.com + url: ldaps://ad.example.com:636 + bind_dn: es_svc_user@ad.example.com <1> + bind_password: es_svc_user_password +------------------------------------------------------------ +<1> This is the user that all Active Directory search requests are executed as. + Without a bind user configured, all requests run as the user that is authenticating + with Elasticsearch. + +When a bind user is configured, connection pooling is enabled by default. +Connection pooling can be disabled using the `user_search.pool.enabled` setting. + ===== Multiple Domain Support When authenticating users across multiple domains in a forest, there are a few minor differences in the configuration and the way that users will authenticate. The `domain_name` @@ -176,6 +215,14 @@ operation are supported: failover and load balancing assuming an unencrypted connection to port 389. For example, `ldap://:389`. This settings is required when connecting using SSL/TLS or via a custom port. +| `bind_dn` | no | The DN of the user that is used to bind to Active Directory + and perform searches. Due to its potential security + impact, `bind_dn` is not exposed via the + {ref}/cluster-nodes-info.html#cluster-nodes-info[nodes info API]. +| `bind_password` | no | The password for the user that is used to bind to + Active Directory. Due to its potential security impact, + `bind_password` is not exposed via the + {ref}/cluster-nodes-info.html#cluster-nodes-info[nodes info API]. | `load_balance.type` | no | The behavior to use when there are multiple LDAP URLs defined. For supported values see <>. | `load_balance.cache_ttl` | no | When using `dns_failover` or `dns_round_robin` as the load @@ -195,12 +242,10 @@ operation are supported: failover and load balancing `(&(objectClass=user)(sAMAccountName={0}))`. For more information, see https://msdn.microsoft.com/en-us/library/aa746475(v=vs.85).aspx[Search Filter Syntax]. | `user_search.upn_filter` | no | Specifies a filter to use to lookup a user given a user principal name. - The default filter looks up `user` objects with either - a matching `userPrincipalName` or a `sAMAccountName` matching the account - portion of the user principal name. If specified, this + The default filter looks up `user` objects with + a matching `userPrincipalName`. If specified, this must be a valid LDAP user search filter, for example - `(&(objectClass=user)(sAMAccountName={0}))`. `{0}` is the value - preceding the `@` sign in the user principal name and `{1}` is + `(&(objectClass=user)(userPrincipalName={1}))`. `{1}` is the full user principal name provided by the user. For more information, see https://msdn.microsoft.com/en-us/library/aa746475(v=vs.85).aspx[Search Filter Syntax]. | `user_search.down_level_filter` | no | Specifies a filter to use to lookup a user given a down level logon name (DOMAIN\user). @@ -209,6 +254,22 @@ operation are supported: failover and load balancing must be a valid LDAP user search filter, for example `(&(objectClass=user)(sAMAccountName={0}))`. For more information, see https://msdn.microsoft.com/en-us/library/aa746475(v=vs.85).aspx[Search Filter Syntax]. +| `user_search.pool.enabled` | no | Enables or disables connection pooling for user search. When + disabled a new connection is created for every search. The + default is `true` when `bind_dn` is provided. 
+| `user_search.pool.size` | no | Specifies the maximum number of connections to Active Directory + server to allow in the connection pool. Defaults to `20`. +| `user_search.pool.initial_size` | no | The initial number of connections to create to Active Directory + server on startup. Defaults to `0`. Values greater than `0` + could cause startup failures if the LDAP server is down. +| `user_search.pool.health_check.enabled` | no | Enables or disables a health check on Active Directory connections in + the connection pool. Connections are checked in the + background at the specified interval. Defaults to `true`. +| `user_search.pool.health_check.dn` | no | Specifies the distinguished name to retrieve as part of + the health check. Defaults to the value of `bind_dn` if present, and if + not falls back to `user_search.base_dn`. +| `user_search.pool.health_check.interval` | no | How often to perform background checks of connections in + the pool. Defaults to `60s`. | `group_search.base_dn` | no | Specifies the context to search for groups in which the user has membership. Defaults to the root of the Active Directory domain. diff --git a/docs/en/security/authentication/ldap-realm.asciidoc b/docs/en/security/authentication/ldap-realm.asciidoc index 6bb62c768a4..0c0ff32128f 100644 --- a/docs/en/security/authentication/ldap-realm.asciidoc +++ b/docs/en/security/authentication/ldap-realm.asciidoc @@ -85,7 +85,7 @@ users, you can use User DN templates to configure the realm. The advantage of this method is that a search does not have to be performed to find the user DN. However, multiple bind operations might be needed to find the correct user DN. -To configure an `ldap` Realm with User Search: +To configure an `ldap` Realm with User DN templates: . Add a realm configuration of type `ldap` to `elasticsearch.yml` in the `xpack.security.authc.realms` namespace. At a minimum, you must set the realm `type` to @@ -119,6 +119,9 @@ xpack: . Restart Elasticsearch +IMPORTANT: The `bind_dn` setting is not used in template mode. +All LDAP operations will execute as the authenticating user. + [[ldap-load-balancing]] ===== Load Balancing and Failover @@ -288,7 +291,11 @@ failover and load balancing modes of operation. all objects contained under `base_dn`. `base` specifies that the `base_dn` is the user object, and that it is the only user considered. Defaults to `sub_tree`. -| `user_search.attribute` | no | Specifies the attribute to match with the username presented +| `user_search.filter` | no | Specifies the filter used to search the directory in attempt to match + an entry with the username provided by the user. Defaults to `(uid={0})`. + `{0}` is substituted with the username provided when searching. +| `user_search.attribute` | no | This setting is deprecated; use `user_search.filter` instead. + Specifies the attribute to match with the username presented to. Defaults to `uid`. | `user_search.pool.enabled` | no | Enables or disables connection pooling for user search. When disabled a new connection is created for every search. 
The diff --git a/docs/en/security/authorization/run-as-privilege.asciidoc b/docs/en/security/authorization/run-as-privilege.asciidoc index 0db5b53a9dd..0a391e7804a 100644 --- a/docs/en/security/authorization/run-as-privilege.asciidoc +++ b/docs/en/security/authorization/run-as-privilege.asciidoc @@ -8,10 +8,10 @@ users, you can use the _run as_ mechanism to restrict data access according to To "run as" (impersonate) another user, you must be able to retrieve the user from the realm you use to authenticate. Both the internal `native` and `file` realms -support this out of the box. The LDAP realm however must be configured to run in -_user search_ mode. For more information, see -<>. -The Active Directory and PKI realms do not support "run as". +support this out of the box. The LDAP realm must be configured to run in +<>. The Active Directory realm must be +<> to support _run as_. +The PKI realm does not support _run as_. To submit requests on behalf of other users, you need to have the `run_as` permission. For example, the following role grants permission to submit request diff --git a/docs/en/security/limitations.asciidoc b/docs/en/security/limitations.asciidoc index 4882c187fbc..fdede740c8f 100644 --- a/docs/en/security/limitations.asciidoc +++ b/docs/en/security/limitations.asciidoc @@ -70,9 +70,7 @@ When a user's role enables document level security for an index: Calling certain Elasticsearch APIs on an alias can potentially leak information about indices that the user isn't authorized to access. For example, when you get the mappings for an alias with the `_mapping` API, the response includes the -index name and mappings for each index that the alias applies to. Similarly, the -response to a `_field_stats` request includes the name of the underlying index, -rather than the alias name. +index name and mappings for each index that the alias applies to. Until this limitation is addressed, avoid index and field names that contain confidential or sensitive information. diff --git a/docs/en/security/tribe-clients-integrations/cross-cluster.asciidoc b/docs/en/security/tribe-clients-integrations/cross-cluster.asciidoc index 6ac48c598ef..f113cd6cd2d 100644 --- a/docs/en/security/tribe-clients-integrations/cross-cluster.asciidoc +++ b/docs/en/security/tribe-clients-integrations/cross-cluster.asciidoc @@ -15,6 +15,12 @@ roles are passed to the remote clusters. A remote cluster checks the user's roles against its local role definitions to determine which indices the user is allowed to access. + + [WARNING] +This feature was added as Beta in Elasticsearch `v5.3` with further +improvements made in 5.4 and 5.5. It requires gateway eligible nodes to be on +`v5.5` onwards. + To use cross cluster search with secured clusters: * Install {xpack} on every node in each connected cluster. diff --git a/docs/en/settings/security-settings.asciidoc b/docs/en/settings/security-settings.asciidoc index 89a5ad62bdb..42d3987513f 100644 --- a/docs/en/settings/security-settings.asciidoc +++ b/docs/en/settings/security-settings.asciidoc @@ -155,6 +155,7 @@ to `1h`. `bind_dn`:: The DN of the user that will be used to bind to the LDAP and perform searches. +Only applicable in {xpack-ref}/ldap-realm.html#ldap-user-search[user search mode]. If this is not specified, an anonymous bind will be attempted. Defaults to Empty. @@ -187,13 +188,19 @@ The scope of the user search. Valid values are `sub_tree`, `one_level` or `base` specifies that the `base_dn` is the user object, and that it is the only user considered. 
Defaults to `sub_tree`. +`user_search.filter`:: +Specifies the filter used to search the directory in attempt to match +an entry with the username provided by the user. Defaults to `(uid={0})`. +`{0}` is substituted with the username provided when searching. + `user_search.attribute`:: +This setting is deprecated; use `user_search.filter` instead. The attribute to match with the username presented to. Defaults to `uid`. `user_search.pool.enabled`:: Enables or disables connection pooling for user search. When disabled a new connection is created for every search. The -default is `true`. +default is `true` when `bind_dn` is provided. `user_search.pool.size`:: The maximum number of connections to the LDAP server to allow in the @@ -201,7 +208,7 @@ connection pool. Defaults to `20`. `user_search.pool.initial_size`:: The initial number of connections to create to the LDAP server on startup. -Defaults to `5`. +Defaults to `0`. `user_search.pool.health_check.enabled`:: Flag to enable or disable a health check on LDAP connections in the connection @@ -210,12 +217,13 @@ Defaults to `true`. `user_search.pool.health_check.dn`:: The distinguished name to be retrieved as part of the health check. -Defaults to the value of `bind_dn`. Required if `bind_dn` is not -specified. +Defaults to the value of `bind_dn` if present, and if +not falls back to `user_search.base_dn`. `user_search.pool.health_check.interval`:: The interval to perform background checks of connections in the pool. Defaults to `60s`. + `group_search.base_dn`:: The container DN to search for groups in which the user has membership. When this element is absent, Security searches for the attribute specified by @@ -353,6 +361,14 @@ The domain name of Active Directory. The cluster can derive the URL and `user_search_dn` fields from values in this element if those fields are not otherwise specified. Required. +`bind_dn`:: +The DN of the user that will be used to bind to Active Directory and perform searches. +Defaults to Empty. + +`bind_password`:: +The password for the user that will be used to bind to Active Directory. +Defaults to Empty. + `unmapped_groups_as_roles`:: Takes a boolean variable. When this element is set to `true`, the names of any LDAP groups that are not referenced in a role-mapping _file_ are used as role @@ -380,12 +396,10 @@ filter looks up `user` objects with either `sAMAccountName` or `user_search.upn_filter`:: Specifies a filter to use to lookup a user given a user principal name. -The default filter looks up `user` objects with either -a matching `userPrincipalName` or a `sAMAccountName` matching the account -portion of the user principal name. If specified, this +The default filter looks up `user` objects with +a matching `userPrincipalName`. If specified, this must be a valid LDAP user search filter, for example -`(&(objectClass=user)(sAMAccountName={0}))`. `{0}` is the value preceding the -`@` sign in the user principal name and `{1}` is the full user principal name +`(&(objectClass=user)(userPrincipalName={1}))`. `{1}` is the full user principal name provided by the user. `user_search.down_level_filter`:: @@ -395,6 +409,32 @@ Specifies a filter to use to lookup a user given a down level logon name must be a valid LDAP user search filter, for example `(&(objectClass=user)(sAMAccountName={0}))`. +`user_search.pool.enabled`:: +Enables or disables connection pooling for user search. When +disabled a new connection is created for every search. The +default is `true` when `bind_dn` is provided. 
+ +`user_search.pool.size`:: +The maximum number of connections to the Active Directory server to allow in the +connection pool. Defaults to `20`. + +`user_search.pool.initial_size`:: +The initial number of connections to create to the Active Directory server on startup. +Defaults to `0`. + +`user_search.pool.health_check.enabled`:: +Flag to enable or disable a health check on Active Directory connections in the connection +pool. Connections are checked in the background at the specified interval. +Defaults to `true`. + +`user_search.pool.health_check.dn`:: +The distinguished name to be retrieved as part of the health check. +Defaults to the value of `bind_dn` if it is a distinguished name. + +`user_search.pool.health_check.interval`:: +The interval to perform background checks of connections in the pool. +Defaults to `60s`. + `group_search.base_dn`:: The context to search for groups in which the user has membership. Defaults to the root of the Active Directory domain. diff --git a/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc b/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc index ff68ea89221..041a8ec81a7 100644 --- a/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc +++ b/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc @@ -2,7 +2,7 @@ === Watching Event Data If you are indexing event data, such as log messages, network traffic, or a web feed, you can create a watch to email notifications when certain events occur. -For example, if you index a feed of RSPVs for meetup events happening around the world, you can create a watch that alerts you to interesting events. +For example, if you index a feed of RSVPs for meetup events happening around the world, you can create a watch that alerts you to interesting events. To index the meetup data, you can use https://www.elastic.co/products/logstash[Logstash] to ingest live data from the Meetup.com streaming API, `http://stream.meetup.com/2/rsvps`. 
diff --git a/plugin/build.gradle b/plugin/build.gradle index 4c28ba60fe5..a16ab1ad0e4 100644 --- a/plugin/build.gradle +++ b/plugin/build.gradle @@ -28,13 +28,12 @@ dependencyLicenses { mapping from: /bc.*/, to: 'bouncycastle' mapping from: /owasp-java-html-sanitizer.*/, to: 'owasp-java-html-sanitizer' mapping from: /transport-netty.*/, to: 'elasticsearch' - mapping from: /rest.*/, to: 'elasticsearch' + mapping from: /elasticsearch-rest-client.*/, to: 'elasticsearch' mapping from: /http.*/, to: 'httpclient' // pulled in by rest client mapping from: /commons-.*/, to: 'commons' // pulled in by rest client - mapping from: /sniffer.*/, to: 'elasticsearch' - ignoreSha 'rest' + ignoreSha 'elasticsearch-rest-client' ignoreSha 'transport-netty4' - ignoreSha 'sniffer' + ignoreSha 'elasticsearch-rest-client-sniffer' } licenseHeaders { @@ -75,8 +74,8 @@ dependencies { testCompile 'com.google.code.findbugs:jsr305:3.0.1' // monitoring deps - compile "org.elasticsearch.client:rest:${version}" - compile "org.elasticsearch.client:sniffer:${version}" + compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" + compile "org.elasticsearch.client:elasticsearch-rest-client-sniffer:${version}" // ml deps compile 'net.sf.supercsv:super-csv:2.4.0' @@ -206,54 +205,21 @@ integTestCluster { setting 'xpack.monitoring.exporters._local.type', 'local' setting 'xpack.monitoring.exporters._local.enabled', 'false' setting 'xpack.monitoring.collection.interval', '-1' + keystoreSetting 'bootstrap.password', 'x-pack-test-password' distribution = 'zip' // this is important since we use the reindex module in ML + setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' + waitCondition = { NodeInfo node, AntBuilder ant -> File tmpFile = new File(node.cwd, 'wait.success') - for (int i = 0; i < 10; i++) { - HttpURLConnection httpURLConnection = null; - try { - httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_xpack/security/user/elastic/_password") - .openConnection(); - httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:".getBytes(StandardCharsets.UTF_8))); - httpURLConnection.setRequestMethod("PUT"); - httpURLConnection.setDoOutput(true); - httpURLConnection.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); - - httpURLConnection.connect(); - OutputStream out = httpURLConnection.getOutputStream(); - out.write("{\"password\": \"x-pack-test-password\"}".getBytes(StandardCharsets.UTF_8)); - out.close() - - if (httpURLConnection.getResponseCode() == 200) { - break - } - } catch (Exception e) { - httpURLConnection.disconnect() - if (i == 9) { - logger.error("final attempt to set elastic password", e) - } else { - logger.debug("failed to set elastic password", e) - } - } finally { - if (httpURLConnection != null) { - httpURLConnection.disconnect(); - } - } - - // did not start, so wait a bit before trying again - Thread.sleep(500L); - } - for (int i = 0; i < 10; i++) { // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned HttpURLConnection httpURLConnection = null; try { httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${numNodes}&wait_for_status=yellow").openConnection(); httpURLConnection.setRequestProperty("Authorization", "Basic " + - 
Base64.getEncoder().encodeToString("elastic:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); + Base64.getEncoder().encodeToString("x_pack_rest_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); httpURLConnection.setRequestMethod("GET"); httpURLConnection.connect(); if (httpURLConnection.getResponseCode() == 200) { @@ -363,4 +329,5 @@ run { setting 'xpack.security.enabled', 'true' setting 'xpack.monitoring.enabled', 'true' setting 'xpack.watcher.enabled', 'true' + keystoreSetting 'bootstrap.password', 'password' } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java b/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java index 1f1060d57b0..804bf3c0a50 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplate.java @@ -47,7 +47,7 @@ public class TextTemplate implements ToXContent { if (params == null) { params = new HashMap<>(); } - this.script = new Script(type, Script.DEFAULT_TEMPLATE_LANG, template, options, params); + this.script = new Script(type, type == ScriptType.STORED ? null : Script.DEFAULT_TEMPLATE_LANG, template, options, params); this.inlineTemplate = null; } @@ -116,13 +116,6 @@ public class TextTemplate implements ToXContent { return new TextTemplate(parser.text()); } else { Script template = Script.parse(parser, Script.DEFAULT_TEMPLATE_LANG); - - // for deprecation of stored script namespaces the default lang is ignored, - // so the template lang must be set for a stored script - if (template.getType() == ScriptType.STORED) { - template = new Script(ScriptType.STORED, Script.DEFAULT_TEMPLATE_LANG, template.getIdOrCode(), template.getParams()); - } - return new TextTemplate(template); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java b/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java index 89ec30d4400..0906d99eb3b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/common/text/TextTemplateEngine.java @@ -51,7 +51,8 @@ public class TextTemplateEngine extends AbstractComponent { options.put(Script.CONTENT_TYPE_OPTION, mediaType); } - Script script = new Script(textTemplate.getType(), "mustache", template, options, mergedModel); + Script script = new Script(textTemplate.getType(), + textTemplate.getType() == ScriptType.STORED ? 
null : "mustache", template, options, mergedModel); TemplateScript.Factory compiledTemplate = service.compile(script, Watcher.SCRIPT_TEMPLATE_CONTEXT); return compiledTemplate.newInstance(model).execute(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index f1dcf25a6b2..35326ee25a0 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -6,15 +6,13 @@ package org.elasticsearch.xpack.deprecation; import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -35,39 +33,27 @@ public class DeprecationChecks { private DeprecationChecks() { } - static List, ClusterState, DeprecationIssue>> CLUSTER_SETTINGS_CHECKS = + static List> CLUSTER_SETTINGS_CHECKS = Collections.unmodifiableList(Arrays.asList( - // STUB: TODO(talevy): add checks + // STUB )); - static List, ClusterState, DeprecationIssue>> NODE_SETTINGS_CHECKS = + static List, List, DeprecationIssue>> NODE_SETTINGS_CHECKS = Collections.unmodifiableList(Arrays.asList( - // STUB: TODO(talevy): add checks + // STUB )); @SuppressWarnings("unchecked") static List> INDEX_SETTINGS_CHECKS = Collections.unmodifiableList(Arrays.asList( - indexMetaData -> { - List issues = new ArrayList<>(); - if (indexMetaData.getCreationVersion().onOrBefore(Version.V_5_6_0)) { - for (ObjectCursor mappingMetaData : indexMetaData.getMappings().values()) { - Map sourceAsMap = mappingMetaData.value.sourceAsMap(); - ((Map) sourceAsMap.getOrDefault("properties", Collections.emptyMap())) - .forEach((key, value) -> { - Map valueMap = ((Map) value); - if ("boolean".equals(valueMap.get("type"))) { - issues.add("type: " + mappingMetaData.value.type() + ", field: " + key); - } - }); - } - } - return new DeprecationIssue(DeprecationIssue.Level.INFO, "Coercion of boolean fields", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_mappings_changes.html#_coercion_of_boolean_fields", - Arrays.toString(issues.toArray())); - } - )); + IndexDeprecationChecks::allMetaFieldIsDisabledByDefaultCheck, + IndexDeprecationChecks::baseSimilarityDefinedCheck, + IndexDeprecationChecks::coercionCheck, + IndexDeprecationChecks::dynamicTemplateWithMatchMappingTypeCheck, + IndexDeprecationChecks::includeInAllCheck, + IndexDeprecationChecks::indexSharedFileSystemCheck, + IndexDeprecationChecks::indexStoreTypeCheck, + IndexDeprecationChecks::storeThrottleSettingsCheck)); /** * helper utility function to reduce repeat of running a specific {@link Set} of checks. 
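The `filterChecks` helper referenced in the javadoc above is not part of this hunk. Its contract, as used by `DeprecationInfoAction.Response.from` further down, is simply to run every check and keep the non-null issues; a minimal sketch, assuming this signature rather than quoting the real method, is:

[source,java]
------------------------------------------------------------
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

// Sketch of the filterChecks contract: each check either returns a
// DeprecationIssue or null, and only the non-null issues are reported.
// The real helper lives in DeprecationChecks; this signature is an assumption.
static <T> List<DeprecationIssue> filterChecks(List<T> checks, Function<T, DeprecationIssue> mapper) {
    return checks.stream()
            .map(mapper)                // run each check
            .filter(Objects::nonNull)   // drop checks that found nothing
            .collect(Collectors.toList());
}
------------------------------------------------------------
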
diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 74d95dee98b..61855746ec0 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -6,12 +6,17 @@ package org.elasticsearch.xpack.deprecation; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; @@ -34,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.node.NodeService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackPlugin; @@ -153,6 +159,7 @@ public class DeprecationInfoAction extends Action nodesInfo, ClusterState state, + static DeprecationInfoAction.Response from(List nodesInfo, List nodesStats, ClusterState state, IndexNameExpressionResolver indexNameExpressionResolver, String[] indices, IndicesOptions indicesOptions, - List, ClusterState,DeprecationIssue>>clusterSettingsChecks, - List, ClusterState, DeprecationIssue>> nodeSettingsChecks, + List>clusterSettingsChecks, + List, List, DeprecationIssue>> nodeSettingsChecks, List> indexSettingsChecks) { List clusterSettingsIssues = filterChecks(clusterSettingsChecks, - (c) -> c.apply(nodesInfo, state)); + (c) -> c.apply(state)); List nodeSettingsIssues = filterChecks(nodeSettingsChecks, - (c) -> c.apply(nodesInfo, state)); + (c) -> c.apply(nodesInfo, nodesStats)); String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, indicesOptions, indices); @@ -305,17 +312,24 @@ public class DeprecationInfoAction extends Action listener) { if (licenseState.isDeprecationAllowed()) { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest("_local").settings(true).plugins(true); - client.admin().cluster().nodesInfo(nodesInfoRequest, ActionListener.wrap(nodesInfoResponse -> { - // if there's a failure, then we failed to work with the - // _local node (guaranteed a single exception) - if (nodesInfoResponse.hasFailures()) { - throw nodesInfoResponse.failures().get(0); - } + NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("_local").fs(true); - listener.onResponse(Response.from(nodesInfoResponse.getNodes(), state, - indexNameExpressionResolver, request.indices(), request.indicesOptions(), - CLUSTER_SETTINGS_CHECKS, NODE_SETTINGS_CHECKS, 
INDEX_SETTINGS_CHECKS)); - }, listener::onFailure)); + client.admin().cluster().nodesInfo(nodesInfoRequest, ActionListener.wrap( + nodesInfoResponse -> { + if (nodesInfoResponse.hasFailures()) { + throw nodesInfoResponse.failures().get(0); + } + client.admin().cluster().nodesStats(nodesStatsRequest, ActionListener.wrap( + nodesStatsResponse -> { + if (nodesStatsResponse.hasFailures()) { + throw nodesStatsResponse.failures().get(0); + } + listener.onResponse(Response.from(nodesInfoResponse.getNodes(), + nodesStatsResponse.getNodes(), state, indexNameExpressionResolver, + request.indices(), request.indicesOptions(), CLUSTER_SETTINGS_CHECKS, + NODE_SETTINGS_CHECKS, INDEX_SETTINGS_CHECKS)); + }, listener::onFailure)); + },listener::onFailure)); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.DEPRECATION)); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java new file mode 100644 index 00000000000..1804c15f181 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -0,0 +1,241 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.deprecation; + + +import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.AllFieldMapper; +import org.elasticsearch.index.mapper.DynamicTemplate; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Function; + +/** + * Index-specific deprecation checks + */ +public class IndexDeprecationChecks { + + private static void fieldLevelMappingIssue(IndexMetaData indexMetaData, BiConsumer> checker) { + for (ObjectCursor mappingMetaData : indexMetaData.getMappings().values()) { + Map sourceAsMap = mappingMetaData.value.sourceAsMap(); + checker.accept(mappingMetaData.value, sourceAsMap); + } + } + + /** + * iterates through the "properties" field of mappings and returns any predicates that match in the + * form of issue-strings. 
+ * + * @param type the document type + * @param parentMap the mapping to read properties from + * @param predicate the predicate to check against for issues, issue is returned if predicate evaluates to true + * @return a list of issues found in fields + */ + @SuppressWarnings("unchecked") + private static List findInPropertiesRecursively(String type, Map parentMap, + Function, Boolean> predicate) { + List issues = new ArrayList<>(); + Map properties = (Map) parentMap.get("properties"); + if (properties == null) { + return issues; + } + for (Map.Entry entry : properties.entrySet()) { + Map valueMap = (Map) entry.getValue(); + if (predicate.apply(valueMap)) { + issues.add("[type: " + type + ", field: " + entry.getKey() + "]"); + } + + Map values = (Map) valueMap.get("fields"); + if (values != null) { + for (Map.Entry multifieldEntry : values.entrySet()) { + Map multifieldValueMap = (Map) multifieldEntry.getValue(); + if (predicate.apply(multifieldValueMap)) { + issues.add("[type: " + type + ", field: " + entry.getKey() + ", multifield: " + multifieldEntry.getKey() + "]"); + } + if (multifieldValueMap.containsKey("properties")) { + issues.addAll(findInPropertiesRecursively(type, multifieldValueMap, predicate)); + } + } + } + if (valueMap.containsKey("properties")) { + issues.addAll(findInPropertiesRecursively(type, valueMap, predicate)); + } + } + + return issues; + } + + static DeprecationIssue coercionCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { + List issues = new ArrayList<>(); + fieldLevelMappingIssue(indexMetaData, (mappingMetaData, sourceAsMap) -> { + issues.addAll(findInPropertiesRecursively(mappingMetaData.type(), sourceAsMap, + property -> "boolean".equals(property.get("type")))); + }); + if (issues.size() > 0) { + return new DeprecationIssue(DeprecationIssue.Level.INFO, "Coercion of boolean fields", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html#_coercion_of_boolean_fields", + issues.toString()); + } + } + return null; + } + + @SuppressWarnings("unchecked") + static DeprecationIssue allMetaFieldIsDisabledByDefaultCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { + List issues = new ArrayList<>(); + fieldLevelMappingIssue(indexMetaData, (mappingMetaData, sourceAsMap) -> { + Map allMetaData = (Map) sourceAsMap.getOrDefault("_all", Collections.emptyMap()); + Object enabledObj = allMetaData.get("enabled"); + if (enabledObj != null) { + enabledObj = Booleans.parseBooleanLenient(enabledObj.toString(), + AllFieldMapper.Defaults.ENABLED.enabled); + } + if (Boolean.TRUE.equals(enabledObj)) { + issues.add(mappingMetaData.type()); + } + }); + if (issues.size() > 0) { + return new DeprecationIssue(DeprecationIssue.Level.INFO, + "The _all meta field is disabled by default on indices created in 6.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html#_the_literal__all_literal_meta_field_is_now_disabled_by_default", + "types: " + issues.toString()); + } + } + return null; + } + + static DeprecationIssue includeInAllCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { + List issues = new ArrayList<>(); + fieldLevelMappingIssue(indexMetaData, (mappingMetaData, sourceAsMap) -> { + issues.addAll(findInPropertiesRecursively(mappingMetaData.type(), sourceAsMap, + property -> 
property.containsKey("include_in_all"))); + }); + if (issues.size() > 0) { + return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "The [include_in_all] mapping parameter is now disallowed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html#_the_literal_include_in_all_literal_mapping_parameter_is_now_disallowed", + issues.toString()); + } + } + return null; + } + + static DeprecationIssue dynamicTemplateWithMatchMappingTypeCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { + List issues = new ArrayList<>(); + fieldLevelMappingIssue(indexMetaData, (mappingMetaData, sourceAsMap) -> { + List dynamicTemplates = (List) mappingMetaData + .getSourceAsMap().getOrDefault("dynamic_templates", Collections.emptyList()); + for (Object template : dynamicTemplates) { + for (Map.Entry prop : ((Map) template).entrySet()) { + Map val = (Map) prop.getValue(); + if (val.containsKey("match_mapping_type")) { + Object mappingMatchType = val.get("match_mapping_type"); + boolean isValidMatchType = Arrays.stream(DynamicTemplate.XContentFieldType.values()) + .anyMatch(v -> v.toString().equals(mappingMatchType)); + if (isValidMatchType == false) { + issues.add("type: " + mappingMetaData.type() + ", dynamicFieldDefinition" + + prop.getKey() + ", unknown match_mapping_type[" + mappingMatchType + "]"); + } + } + } + } + }); + if (issues.size() > 0) { + return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "Unrecognized match_mapping_type options not silently ignored", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html" + + "#_unrecognized_literal_match_mapping_type_literal_options_not_silently_ignored", + issues.toString()); + } + } + return null; + } + + @SuppressWarnings("unchecked") + static DeprecationIssue baseSimilarityDefinedCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { + Settings settings = indexMetaData.getSettings().getAsSettings("index.similarity.base"); + if (settings.size() > 0) { + return new DeprecationIssue(DeprecationIssue.Level.WARNING, + "The base similarity is now ignored as coords and query normalization have been removed." + + "If provided, this setting will be ignored and issue a deprecation warning", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_similarity_settings", null); + + } + } + return null; + } + + @SuppressWarnings("unchecked") + static DeprecationIssue indexStoreTypeCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1) && + indexMetaData.getSettings().get("index.store.type") != null) { + return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "The default index.store.type has been removed. 
If you were using it, " + + "we advise that you simply remove it from your index settings and Elasticsearch" + + "will use the best store implementation for your operating system.", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_store_settings", null); + + } + return null; + } + + @SuppressWarnings("unchecked") + static DeprecationIssue storeThrottleSettingsCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1)) { + Settings settings = indexMetaData.getSettings(); + Settings throttleSettings = settings.getAsSettings("index.store.throttle"); + ArrayList foundSettings = new ArrayList<>(); + if (throttleSettings.get("max_bytes_per_sec") != null) { + foundSettings.add("index.store.throttle.max_bytes_per_sec"); + } + if (throttleSettings.get("type") != null) { + foundSettings.add("index.store.throttle.type"); + } + + if (foundSettings.isEmpty() == false) { + return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "index.store.throttle settings are no longer recognized. these settings should be removed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_store_throttling_settings", "present settings: " + foundSettings); + } + } + return null; + } + + @SuppressWarnings("unchecked") + static DeprecationIssue indexSharedFileSystemCheck(IndexMetaData indexMetaData) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0_alpha1) && + indexMetaData.getSettings().get("index.shared_filesystem") != null) { + return new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "[index.shared_filesystem] setting should be removed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_shadow_replicas_have_been_removed", null); + + } + return null; + } +} \ No newline at end of file diff --git a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index cbfa06cb0c8..e38ea9ad343 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -25,16 +25,16 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms.Bucket; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import 
org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java index b14d9745494..0469356daa2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/action/OpenJobAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.action; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; @@ -27,12 +28,14 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -491,9 +494,12 @@ public class OpenJobAction extends Action 0) { try (XContentBuilder mapping = mappingSupplier.get()) { - PutMappingRequest putMappingRequest = new PutMappingRequest(concreteIndices); + PutMappingRequest putMappingRequest = new PutMappingRequest(indicesThatRequireAnUpdate); putMappingRequest.type(ElasticsearchMappings.DOC_TYPE); putMappingRequest.source(mapping); client.execute(PutMappingAction.INSTANCE, putMappingRequest, ActionListener.wrap( @@ -502,13 +508,14 @@ public class OpenJobAction extends Action indicesToUpdate = new ArrayList<>(); + + ImmutableOpenMap> currentMapping = state.metaData().findMappings(concreteIndices, + new String[] { ElasticsearchMappings.DOC_TYPE }); + + for (String index : concreteIndices) { + ImmutableOpenMap innerMap = currentMapping.get(index); + if (innerMap != null) { + MappingMetaData metaData = innerMap.get(ElasticsearchMappings.DOC_TYPE); + try { + Map meta = (Map) metaData.sourceAsMap().get("_meta"); + if (meta != null) { + String versionString = (String) meta.get("version"); + if (versionString == null) { + logger.info("Version of mappings for [{}] not found, recreating", index); + indicesToUpdate.add(index); + continue; + } + + Version mappingVersion = Version.fromString(versionString); + + if (mappingVersion.onOrAfter(minVersion)) { + continue; + } else { + logger.info("Mappings for [{}] are outdated [{}], updating it[{}].", index, mappingVersion, Version.CURRENT); + indicesToUpdate.add(index); + continue; + } + } else { + logger.info("Version of mappings for [{}] not found, recreating", index); + indicesToUpdate.add(index); + continue; + } + } catch (Exception e) { + logger.error(new ParameterizedMessage("Failed to retrieve mapping version for [{}], recreating", index), e); + indicesToUpdate.add(index); + continue; + } + } else { + logger.info("No mappings found for [{}], 
recreating", index); + indicesToUpdate.add(index); + } + } + return indicesToUpdate.toArray(new String[indicesToUpdate.size()]); + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 4c6ab0e9ca8..3f71c387d4b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -43,6 +43,8 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.function.Consumer; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import java.util.stream.Collectors; /** @@ -191,13 +193,17 @@ public class JobManager extends AbstractComponent { @Override public void onFailure(Exception e) { - if (e instanceof IllegalArgumentException - && e.getMessage().matches("mapper \\[.*\\] of different type, current_type \\[.*\\], merged_type \\[.*\\]")) { - actionListener.onFailure(ExceptionsHelper.badRequestException(Messages.JOB_CONFIG_MAPPING_TYPE_CLASH, e)); - } else { - actionListener.onFailure(e); + if (e instanceof IllegalArgumentException) { + // the underlying error differs depending on which way around the clashing fields are seen + Matcher matcher = Pattern.compile("(?:mapper|Can't merge a non object mapping) \\[(.*)\\] (?:of different type, " + + "current_type \\[.*\\], merged_type|with an object mapping) \\[.*\\]").matcher(e.getMessage()); + if (matcher.matches()) { + String msg = Messages.getMessage(Messages.JOB_CONFIG_MAPPING_TYPE_CLASH, matcher.group(1)); + actionListener.onFailure(ExceptionsHelper.badRequestException(msg, e)); + return; + } } - + actionListener.onFailure(e); } }; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java index e352671bc00..175f3108e35 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfig.java @@ -601,8 +601,8 @@ public class AnalysisConfig implements ToXContentObject, Writeable { String prevTermField = null; for (String termField : termFields) { if (prevTermField != null && termField.startsWith(prevTermField + ".")) { - throw ExceptionsHelper.badRequestException("Fields " + prevTermField + " and " + termField + - " cannot both be used in the same analysis_config"); + throw ExceptionsHelper.badRequestException("Fields [" + prevTermField + "] and [" + termField + + "] cannot both be used in the same analysis_config"); } prevTermField = termField; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java index 9e8ada6a439..7f5ba7ccc82 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/messages/Messages.java @@ -135,8 +135,7 @@ public final class Messages { public static final String JOB_CONFIG_DETECTOR_OVER_DISALLOWED = "''over'' is not a permitted value for {0}"; public static final String JOB_CONFIG_MAPPING_TYPE_CLASH = - "A field has a different mapping type to an existing field with the same name. 
" + - "Use the 'results_index_name' setting to assign the job to another index"; + "This job would cause a mapping clash with existing field [{0}] - avoid the clash by assigning a dedicated results index"; public static final String JOB_CONFIG_TIME_FIELD_NOT_ALLOWED_IN_ANALYSIS_CONFIG = "data_description.time_field may not be used in the analysis_config"; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java index a3a83aeb15d..8de44e59cd5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/ElasticsearchMappings.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.job.persistence; +import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.ml.job.config.Detector; import org.elasticsearch.xpack.ml.job.config.Job; @@ -111,10 +112,24 @@ public class ElasticsearchMappings { .endArray(); } + /** + * Inserts "_meta" containing useful information like the version into the mapping + * template. + * + * @param builder The builder for the mappings + * @throws IOException On write error + */ + public static void addMetaInformation(XContentBuilder builder) throws IOException { + builder.startObject("_meta") + .field("version", Version.CURRENT) + .endObject(); + } + public static XContentBuilder docMapping() throws IOException { XContentBuilder builder = jsonBuilder(); builder.startObject(); builder.startObject(DOC_TYPE); + addMetaInformation(builder); addDefaultMapping(builder); builder.startObject(PROPERTIES); @@ -523,12 +538,15 @@ public class ElasticsearchMappings { * by knowing the ID of a particular document. 
*/ public static XContentBuilder stateMapping() throws IOException { - return jsonBuilder() - .startObject() - .startObject(DOC_TYPE) - .field(ENABLED, false) - .endObject() - .endObject(); + XContentBuilder builder = jsonBuilder(); + builder.startObject(); + builder.startObject(DOC_TYPE); + addMetaInformation(builder); + builder.field(ENABLED, false); + builder.endObject(); + builder.endObject(); + + return builder; } /** diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java index b99d23f4aec..a4b7d1584f3 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobStorageDeletionTask.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.job.persistence; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchRequest; @@ -24,7 +25,6 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -33,7 +33,10 @@ import org.elasticsearch.xpack.ml.job.process.autodetect.state.CategorizerState; import org.elasticsearch.xpack.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.job.process.autodetect.state.Quantiles; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.function.Consumer; public class JobStorageDeletionTask extends Task { @@ -176,26 +179,31 @@ public class JobStorageDeletionTask extends Task { private void deleteAliases(String jobId, Client client, ActionListener finishedHandler) { final String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); final String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(jobId); - final String indexPattern = AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*"; - IndicesAliasesRequest request = new IndicesAliasesRequest().addAliasAction( - IndicesAliasesRequest.AliasActions.remove().aliases(readAliasName, writeAliasName).indices(indexPattern)); - client.admin().indices().aliases(request, ActionListener.wrap( - response -> finishedHandler.onResponse(true), - e -> { - if (e instanceof AliasesNotFoundException) { - logger.warn("[{}] Aliases {} not found. 
Continuing to delete job.", jobId, - ((AliasesNotFoundException) e).getResourceId()); + // first find the concrete indices associated with the aliases + GetAliasesRequest aliasesRequest = new GetAliasesRequest().aliases(readAliasName, writeAliasName) + .indicesOptions(IndicesOptions.lenientExpandOpen()); + client.admin().indices().getAliases(aliasesRequest, ActionListener.wrap( + getAliasesResponse -> { + Set aliases = new HashSet<>(); + getAliasesResponse.getAliases().valuesIt().forEachRemaining( + metaDataList -> metaDataList.forEach(metadata -> aliases.add(metadata.getAlias()))); + if (aliases.isEmpty()) { + // don't error if the job's aliases have already been deleted - carry on and delete the rest of the job's data finishedHandler.onResponse(true); - } else if (e instanceof IndexNotFoundException) { - logger.warn("[{}] Index [{}] referenced by alias not found. Continuing to delete job.", jobId, - ((IndexNotFoundException) e).getIndex().getName()); - finishedHandler.onResponse(true); - } else { - // all other exceptions should die - logger.error("[" + jobId + "] Failed to delete aliases [" + readAliasName + ", " + writeAliasName + "].", e); - finishedHandler.onFailure(e); + return; } - })); + List indices = new ArrayList<>(); + getAliasesResponse.getAliases().keysIt().forEachRemaining(indices::add); + // remove the aliases from the concrete indices found in the first step + IndicesAliasesRequest removeRequest = new IndicesAliasesRequest().addAliasAction( + IndicesAliasesRequest.AliasActions.remove() + .aliases(aliases.toArray(new String[aliases.size()])) + .indices(indices.toArray(new String[indices.size()]))); + client.admin().indices().aliases(removeRequest, ActionListener.wrap( + removeResponse -> finishedHandler.onResponse(true), + finishedHandler::onFailure)); + }, + finishedHandler::onFailure)); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java index 508085162cf..63af80df892 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java @@ -50,7 +50,6 @@ public class NativeController { private final CppLogMessageHandler cppLogHandler; private final OutputStream commandStream; - private Thread logTailThread; NativeController(Environment env, NamedPipeHelper namedPipeHelper) throws IOException { ProcessPipes processPipes = new ProcessPipes(env, namedPipeHelper, ProcessCtrl.CONTROLLER, null, @@ -61,15 +60,22 @@ public class NativeController { } void tailLogsInThread() { - logTailThread = new Thread(() -> { - try { - cppLogHandler.tailStream(); - cppLogHandler.close(); - } catch (IOException e) { - LOGGER.error("Error tailing C++ controller logs", e); - } - LOGGER.info("Native controller process has stopped - no new native processes can be started"); - }); + final Thread logTailThread = new Thread( + () -> { + try { + cppLogHandler.tailStream(); + cppLogHandler.close(); + } catch (IOException e) { + LOGGER.error("Error tailing C++ controller logs", e); + } + LOGGER.info("Native controller process has stopped - no new native processes can be started"); + }, + "ml-cpp-log-tail-thread"); + /* + * This thread is created on the main thread so would default to being a user thread which could prevent the JVM from exiting if + * this thread were to still be running during shutdown. As such, we mark it as a daemon thread. 
+ */ + logTailThread.setDaemon(true); logTailThread.start(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java index af3bcf26c7c..bf26359627a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/ClusterAlertsUtil.java @@ -49,7 +49,7 @@ public class ClusterAlertsUtil { * The last time that all watches were updated. For now, all watches have been updated in the same version and should all be replaced * together. */ - public static final int LAST_UPDATED_VERSION = Version.V_6_0_0_alpha2.id; + public static final int LAST_UPDATED_VERSION = Version.V_6_0_0_beta1.id; /** * An unsorted list of Watch IDs representing resource files for Monitoring Cluster Alerts. diff --git a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java index 5a6361dca03..ad44c40fc65 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/monitoring/exporter/MonitoringTemplateUtils.java @@ -24,7 +24,7 @@ public final class MonitoringTemplateUtils { *
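Editor's aside, not part of the patch: the daemon-thread reasoning in the NativeController change above can be seen in a minimal, self-contained sketch (class and thread names here are illustrative only):

public class DaemonThreadSketch {
    public static void main(String[] args) {
        Thread tail = new Thread(() -> {
            try {
                while (true) {
                    Thread.sleep(1_000L); // stand-in for tailing the C++ controller's log stream
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, "example-log-tail-thread");
        tail.setDaemon(true); // a user (non-daemon) thread here would keep the JVM alive at shutdown
        tail.start();
        // main() returns immediately; because the tailer is a daemon thread, the JVM still exits.
    }
}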
* It may be possible for this to diverge between templates and pipelines, but for now they're the same. */ - public static final int LAST_UPDATED_VERSION = Version.V_6_0_0_alpha2.id; + public static final int LAST_UPDATED_VERSION = Version.V_6_0_0_beta1.id; /** * Current version of templates used in their name to differentiate from breaking changes (separate from product version). diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java index 8029341f5c4..21af90b93fe 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -5,6 +5,25 @@ */ package org.elasticsearch.xpack.security; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.time.Clock; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; + import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; @@ -35,6 +54,7 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; @@ -128,8 +148,8 @@ import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.NativeRolesStore; import org.elasticsearch.xpack.security.authz.store.ReservedRolesStore; +import org.elasticsearch.xpack.security.bootstrap.BootstrapElasticPassword; import org.elasticsearch.xpack.security.bootstrap.ContainerPasswordBootstrapCheck; -import org.elasticsearch.xpack.security.crypto.CryptoService; import org.elasticsearch.xpack.security.rest.SecurityRestFilter; import org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction; import org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction; @@ -159,28 +179,8 @@ import org.elasticsearch.xpack.ssl.SSLService; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import java.io.IOException; -import java.security.GeneralSecurityException; -import java.time.Clock; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.function.BiConsumer; -import java.util.function.Function; -import java.util.function.Supplier; -import java.util.function.UnaryOperator; -import java.util.stream.Collectors; - import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; -import static 
org.elasticsearch.common.settings.Setting.groupSetting; import static org.elasticsearch.xpack.XPackSettings.HTTP_SSL_ENABLED; public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { @@ -323,8 +323,8 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { final ReservedRealm reservedRealm = new ReservedRealm(env, settings, nativeUsersStore, anonymousUser, securityLifecycleService, threadPool.getThreadContext()); Map realmFactories = new HashMap<>(); - realmFactories.putAll(InternalRealms.getFactories(threadPool, resourceWatcherService, - sslService, nativeUsersStore, nativeRoleMappingStore)); + realmFactories.putAll(InternalRealms.getFactories(threadPool, resourceWatcherService, sslService, nativeUsersStore, + nativeRoleMappingStore, securityLifecycleService)); for (XPackExtension extension : extensions) { Map newRealms = extension.getRealms(resourceWatcherService); for (Map.Entry entry : newRealms.entrySet()) { @@ -369,6 +369,7 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { } final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, rolesProviders, threadPool.getThreadContext(), licenseState); + securityLifecycleService.addSecurityIndexHealthChangeListener(allRolesStore::onSecurityIndexHealthChange); // to keep things simple, just invalidate all cached entries on license change. this happens so rarely that the impact should be // minimal licenseState.addListener(allRolesStore::invalidateAll); @@ -386,6 +387,11 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings()); securityInterceptor.set(new SecurityServerTransportInterceptor(settings, threadPool, authcService.get(), authzService, licenseState, sslService, securityContext.get(), destructiveOperations)); + + BootstrapElasticPassword bootstrapElasticPassword = new BootstrapElasticPassword(settings, logger, clusterService, reservedRealm, + securityLifecycleService); + bootstrapElasticPassword.initiatePasswordBootstrap(); + return components; } @@ -492,13 +498,15 @@ public class Security implements ActionPlugin, IngestPlugin, NetworkPlugin { public List getBootstrapChecks() { if (enabled) { - return Arrays.asList( - new SSLBootstrapCheck(sslService, settings, env), - new TokenPassphraseBootstrapCheck(settings), - new TokenSSLBootstrapCheck(settings), - new PkiRealmBootstrapCheck(settings, sslService), - new ContainerPasswordBootstrapCheck() + final ArrayList checks = CollectionUtils.arrayAsArrayList( + new SSLBootstrapCheck(sslService, settings, env), + new TokenPassphraseBootstrapCheck(settings), + new TokenSSLBootstrapCheck(settings), + new PkiRealmBootstrapCheck(settings, sslService), + new ContainerPasswordBootstrapCheck() ); + checks.addAll(InternalRealms.getBootstrapChecks(settings)); + return checks; } else { return Collections.emptyList(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java index eb153feda4b..f957a0b7c6b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityFeatureSet.java @@ -183,7 +183,7 @@ public class SecurityFeatureSet implements XPackFeatureSet { sslUsage = in.readMap(); auditUsage = in.readMap(); 
ipFilterUsage = in.readMap(); - if (in.getVersion().before(Version.V_6_0_0_alpha3)) { + if (in.getVersion().before(Version.V_6_0_0_beta1)) { // system key has been removed but older send its usage, so read the map and ignore in.readMap(); } @@ -213,7 +213,7 @@ public class SecurityFeatureSet implements XPackFeatureSet { out.writeMap(sslUsage); out.writeMap(auditUsage); out.writeMap(ipFilterUsage); - if (out.getVersion().before(Version.V_6_0_0_alpha3)) { + if (out.getVersion().before(Version.V_6_0_0_beta1)) { // system key has been removed but older versions still expected it so send a empty map out.writeMap(Collections.emptyMap()); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java index 8a527d87029..e4e1cd61518 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/SecurityLifecycleService.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleListener; @@ -26,6 +27,7 @@ import org.elasticsearch.xpack.security.support.IndexLifecycleManager; import java.util.Collections; import java.util.List; +import java.util.function.BiConsumer; import java.util.function.Predicate; /** @@ -147,6 +149,15 @@ public class SecurityLifecycleService extends AbstractComponent implements Clust return securityIndex.checkMappingVersion(requiredVersion); } + /** + * Adds a listener which will be notified when the security index health changes. The previous and + * current health will be provided to the listener so that the listener can determine if any action + * needs to be taken. 
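+ * <p>
+ * Illustrative sketch of a registration (editor's example, not part of this patch; the lambda body is arbitrary):
+ * <pre>{@code
+ * securityLifecycleService.addSecurityIndexHealthChangeListener((previousHealth, currentHealth) -> {
+ *     boolean becameAvailable = (previousHealth == null || previousHealth.getStatus() == ClusterHealthStatus.RED)
+ *             && currentHealth != null && currentHealth.getStatus() != ClusterHealthStatus.RED;
+ *     if (becameAvailable) {
+ *         // e.g. clear realm caches, as NativeRealm.onSecurityIndexHealthChange does elsewhere in this change
+ *     }
+ * });
+ * }</pre>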
+ */ + public void addSecurityIndexHealthChangeListener(BiConsumer listener) { + securityIndex.addIndexHealthChangeListener(listener); + } + // this is called in a lifecycle listener beforeStop on the cluster service private void close() { if (indexAuditTrail != null) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionModule.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionModule.java index cdaf31ca3e1..58cf6bae6b5 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionModule.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionModule.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.action.filter.SecurityActionFilter; import org.elasticsearch.xpack.security.action.interceptor.BulkShardRequestInterceptor; -import org.elasticsearch.xpack.security.action.interceptor.FieldStatsRequestInterceptor; import org.elasticsearch.xpack.security.action.interceptor.RequestInterceptor; import org.elasticsearch.xpack.security.action.interceptor.SearchRequestInterceptor; import org.elasticsearch.xpack.security.action.interceptor.UpdateRequestInterceptor; @@ -33,7 +32,6 @@ public class SecurityActionModule extends AbstractSecurityModule.Node { multibinder.addBinding().to(SearchRequestInterceptor.class); multibinder.addBinding().to(UpdateRequestInterceptor.class); multibinder.addBinding().to(BulkShardRequestInterceptor.class); - multibinder.addBinding().to(FieldStatsRequestInterceptor.class); } } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldStatsRequestInterceptor.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldStatsRequestInterceptor.java deleted file mode 100644 index 84ec66c9039..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldStatsRequestInterceptor.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.action.interceptor; - -import org.elasticsearch.action.fieldstats.FieldStatsRequest; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportRequest; - -/** - * Intercepts requests to shards to field level stats and strips fields that the user is not allowed to access from the response. 
- */ -public class FieldStatsRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { - @Inject - public FieldStatsRequestInterceptor(Settings settings, ThreadPool threadPool, XPackLicenseState licenseState) { - super(settings, threadPool.getThreadContext(), licenseState); - } - - @Override - public boolean supports(TransportRequest request) { - return request instanceof FieldStatsRequest; - } - - @Override - protected void disableFeatures(FieldStatsRequest request, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled) { - if (fieldLevelSecurityEnabled) { - request.setUseCache(false); - } - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java index 82b8ebcbb0c..95179655991 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/action/user/ChangePasswordRequestBuilder.java @@ -39,18 +39,23 @@ public class ChangePasswordRequestBuilder return this; } + public static char[] validateAndHashPassword(SecureString password) { + Validation.Error error = Validation.Users.validatePassword(password.getChars()); + if (error != null) { + ValidationException validationException = new ValidationException(); + validationException.addValidationError(error.toString()); + throw validationException; + } + return Hasher.BCRYPT.hash(password); + } + /** * Sets the password. Note: the char[] passed to this method will be cleared. */ public ChangePasswordRequestBuilder password(char[] password) { try (SecureString secureString = new SecureString(password)) { - Validation.Error error = Validation.Users.validatePassword(password); - if (error != null) { - ValidationException validationException = new ValidationException(); - validationException.addValidationError(error.toString()); - throw validationException; - } - request.passwordHash(Hasher.BCRYPT.hash(secureString)); + char[] hash = validateAndHashPassword(secureString); + request.passwordHash(hash); } return this; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationResult.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationResult.java new file mode 100644 index 00000000000..f76bffe2e15 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationResult.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc; + +import java.util.Objects; + +import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.xpack.security.user.User; + +/** + * Represents the result of an authentication attempt. + * This allows a {@link Realm} to respond in 3 different ways (without needing to + * resort to {@link org.elasticsearch.action.ActionListener#onFailure(Exception)}) + *
+ * <ol>
+ * <li>Successful authentication of a user</li>
+ * <li>Unable to authenticate user, try another realm (optionally with a diagnostic message)</li>
+ * <li>Unable to authenticate user, terminate authentication (with an error message)</li>
+ * </ol>
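+ * <p>
+ * Illustrative sketch (editor's example, not part of this patch) of how a caller might react to each status,
+ * assuming an outer {@code listener} for the overall request:
+ * <pre>{@code
+ * realm.authenticate(token, ActionListener.wrap(result -> {
+ *     if (result.getStatus() == AuthenticationResult.Status.SUCCESS) {
+ *         // use result.getUser()
+ *     } else if (result.getStatus() == AuthenticationResult.Status.TERMINATE) {
+ *         listener.onFailure(Exceptions.authenticationError(result.getMessage(), result.getException()));
+ *     } else {
+ *         // CONTINUE: fall through to the next realm, optionally recording result.getMessage()
+ *     }
+ * }, listener::onFailure));
+ * }</pre>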
+ */ +public final class AuthenticationResult { + private static final AuthenticationResult NOT_HANDLED = new AuthenticationResult(Status.CONTINUE, null, null, null); + + public enum Status { + SUCCESS, + CONTINUE, + TERMINATE, + } + + private final Status status; + private final User user; + private final String message; + private final Exception exception; + + private AuthenticationResult(Status status, @Nullable User user, @Nullable String message, @Nullable Exception exception) { + this.status = status; + this.user = user; + this.message = message; + this.exception = exception; + } + + public Status getStatus() { + return status; + } + + public User getUser() { + return user; + } + + public String getMessage() { + return message; + } + + public Exception getException() { + return exception; + } + + /** + * Creates an {@code AuthenticationResult} that indicates that the supplied {@link User} + * has been successfully authenticated. + *
+ * <p>
+ * The {@link #getStatus() status} is set to {@link Status#SUCCESS}.
+ * <p>
+ * Neither the {@link #getMessage() message} nor {@link #getException() exception} are populated.
+ *
+ * @param user The user that was authenticated. Cannot be {@code null}. + */ + public static AuthenticationResult success(User user) { + Objects.requireNonNull(user); + return new AuthenticationResult(Status.SUCCESS, user, null, null); + } + + /** + * Creates an {@code AuthenticationResult} that indicates that the realm did not handle the + * authentication request in any way, and has no failure messages. + *
+ * <p>
+ * The {@link #getStatus() status} is set to {@link Status#CONTINUE}.
+ * <p>
+ * The {@link #getMessage() message}, {@link #getException() exception}, and {@link #getUser() user} are all set to {@code null}.
+ */ + public static AuthenticationResult notHandled() { + return NOT_HANDLED; + } + + /** + * Creates an {@code AuthenticationResult} that indicates that the realm attempted to handle the authentication request but was + * unsuccessful. The reason for the failure is given in the supplied message and optional exception. + *
+ * <p>
+ * The {@link #getStatus() status} is set to {@link Status#CONTINUE}.
+ * <p>
+ * The {@link #getUser() user} is not populated.
+ */ + public static AuthenticationResult unsuccessful(String message, @Nullable Exception cause) { + Objects.requireNonNull(message); + return new AuthenticationResult(Status.CONTINUE, null, message, cause); + } + + /** + * Creates an {@code AuthenticationResult} that indicates that the realm attempted to handle the authentication request, was + * unsuccessful and wants to terminate this authentication request. + * The reason for the failure is given in the supplied message and optional exception. + *
+ * <p>
+ * The {@link #getStatus() status} is set to {@link Status#TERMINATE}.
+ * <p>
+ * The {@link #getUser() user} is not populated.
+ */ + public static AuthenticationResult terminate(String message, @Nullable Exception cause) { + return new AuthenticationResult(Status.TERMINATE, null, message, cause); + } + + public boolean isAuthenticated() { + return status == Status.SUCCESS; + } + + @Override + public String toString() { + return "AuthenticationResult{" + + "status=" + status + + ", user=" + user + + ", message=" + message + + ", exception=" + exception + + '}'; + } + +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 38dae45516b..cfd3fd390a1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -5,6 +5,13 @@ */ package org.elasticsearch.xpack.security.authc; +import java.net.InetSocketAddress; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchSecurityException; @@ -27,15 +34,10 @@ import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.security.support.Exceptions; import org.elasticsearch.xpack.security.user.AnonymousUser; import org.elasticsearch.xpack.security.user.User; -import java.net.InetSocketAddress; -import java.util.List; -import java.util.Map; -import java.util.function.BiConsumer; -import java.util.function.Consumer; - import static org.elasticsearch.xpack.security.Security.setting; /** @@ -170,7 +172,7 @@ public class AuthenticationService extends AbstractComponent { *
 *     <li>look for a user token</li>
 *     <li>token extraction {@link #extractToken(Consumer)}</li>
 *     <li>token authentication {@link #consumeToken(AuthenticationToken)}</li>
- *     <li>user lookup for run as if necessary {@link #consumeUser(User)} and
+ *     <li>user lookup for run as if necessary {@link #consumeUser(User, Map)} and
 *     {@link #lookupRunAsUser(User, String, Consumer)}</li>
 *     <li>write authentication into the context {@link #finishAuthentication(User)}</li>
  • * @@ -255,8 +257,8 @@ public class AuthenticationService extends AbstractComponent { /** * Consumes the {@link AuthenticationToken} provided by the caller. In the case of a {@code null} token, {@link #handleNullToken()} * is called. In the case of a {@code non-null} token, the realms are iterated over and the first realm that returns a non-null - * {@link User} is the authenticating realm and iteration is stopped. This user is then passed to {@link #consumeUser(User)} if no - * exception was caught while trying to authenticate the token + * {@link User} is the authenticating realm and iteration is stopped. This user is then passed to {@link #consumeUser(User, Map)} + * if no exception was caught while trying to authenticate the token */ private void consumeToken(AuthenticationToken token) { if (token == null) { @@ -264,30 +266,42 @@ public class AuthenticationService extends AbstractComponent { } else { authenticationToken = token; final List realmsList = realms.asList(); + final Map> messages = new LinkedHashMap<>(); final BiConsumer> realmAuthenticatingConsumer = (realm, userListener) -> { if (realm.supports(authenticationToken)) { - realm.authenticate(authenticationToken, ActionListener.wrap((user) -> { - if (user == null) { - // the user was not authenticated, call this so we can audit the correct event - request.realmAuthenticationFailed(authenticationToken, realm.name()); - } else { + realm.authenticate(authenticationToken, ActionListener.wrap((result) -> { + assert result != null : "Realm " + realm + " produced a null authentication result"; + if (result.getStatus() == AuthenticationResult.Status.SUCCESS) { // user was authenticated, populate the authenticated by information authenticatedBy = new RealmRef(realm.name(), realm.type(), nodeName); + userListener.onResponse(result.getUser()); + } else { + // the user was not authenticated, call this so we can audit the correct event + request.realmAuthenticationFailed(authenticationToken, realm.name()); + if (result.getStatus() == AuthenticationResult.Status.TERMINATE) { + logger.info("Authentication of [{}] was terminated by realm [{}] - {}", + authenticationToken.principal(), realm.name(), result.getMessage()); + userListener.onFailure(Exceptions.authenticationError(result.getMessage(), result.getException())); + } else { + if (result.getMessage() != null) { + messages.put(realm, new Tuple<>(result.getMessage(), result.getException())); + } + userListener.onResponse(null); + } } - userListener.onResponse(user); }, (ex) -> { - logger.warn( - "An error occurred while attempting to authenticate [{}] against realm [{}] - {}", - authenticationToken.principal(), realm.name(), ex); - logger.debug("Authentication failed due to exception", ex); + logger.warn(new ParameterizedMessage( + "An error occurred while attempting to authenticate [{}] against realm [{}]", + authenticationToken.principal(), realm.name()), ex); userListener.onFailure(ex); - }), request); + })); } else { userListener.onResponse(null); } }; final IteratingActionListener authenticatingListener = - new IteratingActionListener<>(ActionListener.wrap(this::consumeUser, + new IteratingActionListener<>(ActionListener.wrap( + (user) -> consumeUser(user, messages), (e) -> listener.onFailure(request.exceptionProcessingRequest(e, token))), realmAuthenticatingConsumer, realmsList, threadContext); try { @@ -342,10 +356,9 @@ public class AuthenticationService extends AbstractComponent { * functionality is in use. 
When run as is not in use, {@link #finishAuthentication(User)} is called, otherwise we try to lookup * the run as user in {@link #lookupRunAsUser(User, String, Consumer)} */ - private void consumeUser(User user) { + private void consumeUser(User user, Map> messages) { if (user == null) { - final Map> failureDetails = Realm.getAuthenticationFailureDetails(threadContext); - failureDetails.forEach((realm, tuple) -> { + messages.forEach((realm, tuple) -> { final String message = tuple.v1(); final String cause = tuple.v2() == null ? "" : " (Caused by " + tuple.v2() + ")"; logger.warn("Authentication to realm {} failed - {}{}", realm.name(), message, cause); @@ -438,21 +451,16 @@ public class AuthenticationService extends AbstractComponent { } } - abstract static class AuditableRequest implements IncomingRequest { + abstract static class AuditableRequest { final AuditTrail auditTrail; final AuthenticationFailureHandler failureHandler; final ThreadContext threadContext; - private final InetSocketAddress remoteAddress; - private final RequestType requestType; - AuditableRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext, - RequestType requestType, InetSocketAddress remoteAddress) { + AuditableRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext) { this.auditTrail = auditTrail; this.failureHandler = failureHandler; this.threadContext = threadContext; - this.remoteAddress = remoteAddress; - this.requestType = requestType; } abstract void realmAuthenticationFailed(AuthenticationToken token, String realm); @@ -469,13 +477,6 @@ public class AuthenticationService extends AbstractComponent { abstract void authenticationSuccess(String realm, User user); - public InetSocketAddress getRemoteAddress() { - return remoteAddress; - } - - public RequestType getType() { - return requestType; - } } static class AuditableTransportRequest extends AuditableRequest { @@ -485,7 +486,7 @@ public class AuthenticationService extends AbstractComponent { AuditableTransportRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext, String action, TransportMessage message) { - super(auditTrail, failureHandler, threadContext, getType(message), getRemoteAddress(message)); + super(auditTrail, failureHandler, threadContext); this.action = action; this.message = message; } @@ -539,14 +540,6 @@ public class AuthenticationService extends AbstractComponent { return "transport request action [" + action + "]"; } - private static RequestType getType(TransportMessage message) { - return message.remoteAddress() == null ? RequestType.LOCAL_NODE : RequestType.REMOTE_NODE; - } - - private static InetSocketAddress getRemoteAddress(TransportMessage message) { - TransportAddress transportAddress = message.remoteAddress(); - return transportAddress == null ? 
null : transportAddress.address(); - } } static class AuditableRestRequest extends AuditableRequest { @@ -556,7 +549,7 @@ public class AuthenticationService extends AbstractComponent { @SuppressWarnings("unchecked") AuditableRestRequest(AuditTrail auditTrail, AuthenticationFailureHandler failureHandler, ThreadContext threadContext, RestRequest request) { - super(auditTrail, failureHandler, threadContext, RequestType.REST, (InetSocketAddress) request.getRemoteAddress()); + super(auditTrail, failureHandler, threadContext); this.request = request; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/IncomingRequest.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/IncomingRequest.java deleted file mode 100644 index f8da624c5f5..00000000000 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/IncomingRequest.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authc; - -import java.net.InetSocketAddress; - -/** - * This represents an incoming request that needs to be authenticated - */ -public interface IncomingRequest { - - /** - * This method returns the remote address for the request. It will be null if the request is a - * local transport request. - * - * @return the remote socket address - */ - InetSocketAddress getRemoteAddress(); - - /** - * This returns the type of request that is incoming. It can be a rest request, a remote - * transport request, or a local transport request. - * - * @return the request type - */ - RequestType getType(); - - enum RequestType { - REST, - REMOTE_NODE, - LOCAL_NODE - } -} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java index 4f110026270..27e237c94a0 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/InternalRealms.java @@ -5,25 +5,31 @@ */ package org.elasticsearch.xpack.security.authc; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.security.SecurityLifecycleService; import org.elasticsearch.xpack.security.authc.esnative.NativeRealm; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.file.FileRealm; import org.elasticsearch.xpack.security.authc.ldap.LdapRealm; import org.elasticsearch.xpack.security.authc.pki.PkiRealm; +import org.elasticsearch.xpack.security.authc.support.RoleMappingFileBootstrapCheck; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.ssl.SSLService; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import 
java.util.HashSet; -import java.util.Map; -import java.util.Set; - /** * Provides a single entry point into dealing with all standard XPack security {@link Realm realms}. * This class does not handle extensions. @@ -57,14 +63,18 @@ public class InternalRealms { * This excludes the {@link ReservedRealm}, as it cannot be created dynamically. * @return A map from realm-type to Factory */ - public static Map getFactories( - ThreadPool threadPool, ResourceWatcherService resourceWatcherService, - SSLService sslService, NativeUsersStore nativeUsersStore, - NativeRoleMappingStore nativeRoleMappingStore) { + public static Map getFactories(ThreadPool threadPool, ResourceWatcherService resourceWatcherService, + SSLService sslService, NativeUsersStore nativeUsersStore, + NativeRoleMappingStore nativeRoleMappingStore, + SecurityLifecycleService securityLifecycleService) { Map map = new HashMap<>(); map.put(FileRealm.TYPE, config -> new FileRealm(config, resourceWatcherService)); - map.put(NativeRealm.TYPE, config -> new NativeRealm(config, nativeUsersStore)); + map.put(NativeRealm.TYPE, config -> { + final NativeRealm nativeRealm = new NativeRealm(config, nativeUsersStore); + securityLifecycleService.addSecurityIndexHealthChangeListener(nativeRealm::onSecurityIndexHealthChange); + return nativeRealm; + }); map.put(LdapRealm.AD_TYPE, config -> new LdapRealm(LdapRealm.AD_TYPE, config, sslService, resourceWatcherService, nativeRoleMappingStore, threadPool)); map.put(LdapRealm.LDAP_TYPE, config -> new LdapRealm(LdapRealm.LDAP_TYPE, config, @@ -78,7 +88,7 @@ public class InternalRealms { * This excludes the {@link ReservedRealm}, as it cannot be configured dynamically. * @return A map from realm-type to a collection of Setting objects. */ - public static Map>> getSettings() { + public static Map>> getSettings() { Map>> map = new HashMap<>(); map.put(FileRealm.TYPE, FileRealm.getSettings()); map.put(NativeRealm.TYPE, NativeRealm.getSettings()); @@ -91,4 +101,21 @@ public class InternalRealms { private InternalRealms() { } + public static List getBootstrapChecks(final Settings globalSettings) { + final List checks = new ArrayList<>(); + final Map settingsByRealm = RealmSettings.getRealmSettings(globalSettings); + settingsByRealm.forEach((name, settings) -> { + final RealmConfig realmConfig = new RealmConfig(name, settings, globalSettings, null); + switch (realmConfig.type()) { + case LdapRealm.AD_TYPE: + case LdapRealm.LDAP_TYPE: + case PkiRealm.TYPE: + final BootstrapCheck check = RoleMappingFileBootstrapCheck.create(realmConfig); + if (check != null) { + checks.add(check); + } + } + }); + return checks; + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Realm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Realm.java index c3de9818c54..676d19f5030 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Realm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/Realm.java @@ -5,18 +5,14 @@ */ package org.elasticsearch.xpack.security.authc; +import java.util.HashMap; +import java.util.Map; + import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.security.user.User; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - /** * An 
authentication mechanism to which the default authentication {@link org.elasticsearch.xpack.security.authc.AuthenticationService * service } delegates the authentication process. Different realms may be defined, each may be based on different @@ -24,8 +20,6 @@ import java.util.Map; */ public abstract class Realm implements Comparable { - private static final String AUTHENTICATION_FAILURES_KEY = "_xpack_security_auth_failures"; - protected final Logger logger; protected final String type; protected RealmConfig config; @@ -37,21 +31,21 @@ public abstract class Realm implements Comparable { } /** - * @return The type of this realm + * @return The type of this realm */ public String type() { return type; } /** - * @return The name of this realm. + * @return The name of this realm. */ public String name() { return config.name; } /** - * @return The order of this realm within the executing realm chain. + * @return The order of this realm within the executing realm chain. */ public int order() { return config.order; @@ -63,7 +57,7 @@ public abstract class Realm implements Comparable { } /** - * @return {@code true} if this realm supports the given authentication token, {@code false} otherwise. + * @return {@code true} if this realm supports the given authentication token, {@code false} otherwise. */ public abstract boolean supports(AuthenticationToken token); @@ -71,22 +65,39 @@ public abstract class Realm implements Comparable { * Attempts to extract an authentication token from the given context. If an appropriate token * is found it's returned, otherwise {@code null} is returned. * - * @param context The context that will provide information about the incoming request - * @return The authentication token or {@code null} if not found + * @param context The context that will provide information about the incoming request + * @return The authentication token or {@code null} if not found */ public abstract AuthenticationToken token(ThreadContext context); /** - * Authenticates the given token in an asynchronous fashion. A successful authentication will call the - * {@link ActionListener#onResponse} with the User associated with the given token. An unsuccessful authentication calls - * with {@code null} on the argument. + * Authenticates the given token in an asynchronous fashion. + *
+ * <p>
+ * A successful authentication will call {@link ActionListener#onResponse} with a
+ * {@link AuthenticationResult#success successful} result, which includes the user associated with the given token.
+ * <p>
+ * If the realm does not support, or cannot handle, the token, it will call {@link ActionListener#onResponse} with a
+ * {@link AuthenticationResult#notHandled not-handled} result.
+ * This can include cases where the token identifies a user that is not known by this realm.
+ * <p>
+ * If the realm can handle the token but authentication failed, it will typically call {@link ActionListener#onResponse} with a
+ * {@link AuthenticationResult#unsuccessful failure} result, which includes a diagnostic message regarding the failure.
+ * This can include cases where the token identifies a valid user, but has an invalid password.
+ * <p>
+ * If the realm wishes to assert that it has the exclusive right to handle the provided token, but authentication was not
+ * successful, it will typically call {@link ActionListener#onResponse} with a
+ * {@link AuthenticationResult#terminate termination} result, which includes a diagnostic message regarding the failure.
+ * This can include cases where the token identifies a valid user, but has an invalid password and no other realm is allowed to
+ * authenticate that user.
+ * <p>
+ * The remote address should be {@code null} if the request initiated from the local node.
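+ * <p>
+ * Illustrative sketch (editor's example, not part of this patch) of a password-based implementation;
+ * {@code lookupUser} and {@code verifyPassword} are hypothetical helpers:
+ * <pre>{@code
+ * public void authenticate(AuthenticationToken token, ActionListener<AuthenticationResult> listener) {
+ *     UsernamePasswordToken upToken = (UsernamePasswordToken) token; // supports(token) already verified by the caller
+ *     User user = lookupUser(upToken.principal());
+ *     if (user == null) {
+ *         listener.onResponse(AuthenticationResult.notHandled());
+ *     } else if (verifyPassword(user, upToken.credentials())) {
+ *         listener.onResponse(AuthenticationResult.success(user));
+ *     } else {
+ *         listener.onResponse(AuthenticationResult.unsuccessful(
+ *                 "authentication failed for [" + upToken.principal() + "]", null));
+ *     }
+ * }
+ * }</pre>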
    * - * The remote address should be null if the request initiated from the local node. - * @param token The authentication token - * @param listener The listener to pass the authentication result to - * @param incomingRequest the request that is being authenticated + * @param token The authentication token + * @param listener The listener to pass the authentication result to */ - public abstract void authenticate(AuthenticationToken token, ActionListener listener, IncomingRequest incomingRequest); + public abstract void authenticate(AuthenticationToken token, ActionListener listener); /** * Looks up the user identified the String identifier. A successful lookup will call the {@link ActionListener#onResponse} @@ -117,35 +128,11 @@ public abstract class Realm implements Comparable { /** * Constructs a realm which will be used for authentication. + * * @param config The configuration for the realm * @throws Exception an exception may be thrown if there was an error during realm creation */ Realm create(RealmConfig config) throws Exception; } - /** - * Provides a mechanism for a realm to report errors that were handled within a realm, but may - * provide useful diagnostics about why authentication failed. - */ - protected final void setFailedAuthenticationDetails(String message, @Nullable Exception cause) { - final ThreadContext threadContext = config.threadContext(); - Map> failures = threadContext.getTransient(AUTHENTICATION_FAILURES_KEY); - if (failures == null) { - failures = new LinkedHashMap<>(); - threadContext.putTransient(AUTHENTICATION_FAILURES_KEY, failures); - } - failures.put(this, new Tuple<>(message, cause)); - } - - /** - * Retrieves any authentication failures messages that were set using {@link #setFailedAuthenticationDetails(String, Exception)} - */ - static Map> getAuthenticationFailureDetails(ThreadContext threadContext) { - final Map> failures = threadContext.getTransient(AUTHENTICATION_FAILURES_KEY); - if (failures == null) { - return Collections.emptyMap(); - } - return failures; - } - } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmConfig.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmConfig.java index 6ebc83b865b..2c4db222295 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmConfig.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmConfig.java @@ -16,6 +16,7 @@ public class RealmConfig { final String name; final boolean enabled; final int order; + private final String type; final Settings settings; private final Environment env; @@ -35,6 +36,7 @@ public class RealmConfig { this.env = env; enabled = RealmSettings.ENABLED_SETTING.get(settings); order = RealmSettings.ORDER_SETTING.get(settings); + type = RealmSettings.TYPE_SETTING.get(settings); this.threadContext = threadContext; } @@ -50,6 +52,10 @@ public class RealmConfig { return order; } + public String type() { + return type; + } + public Settings settings() { return settings; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java index 6ed8caa40d4..22eee8052f9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/RealmSettings.java @@ -5,11 +5,6 @@ */ package org.elasticsearch.xpack.security.authc; -import org.elasticsearch.common.settings.AbstractScopedSettings; -import 
org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.extensions.XPackExtension; - import java.util.Collection; import java.util.HashMap; import java.util.HashSet; @@ -17,8 +12,14 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Consumer; +import java.util.function.Function; import java.util.stream.Collectors; +import org.elasticsearch.common.settings.AbstractScopedSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.extensions.XPackExtension; + import static org.elasticsearch.common.Strings.isNullOrEmpty; import static org.elasticsearch.xpack.security.Security.setting; @@ -71,6 +72,16 @@ public class RealmSettings { return settings.getByPrefix(RealmSettings.PREFIX); } + /** + * Extracts the realm settings from a global settings object. + * Returns a Map of realm-name to realm-settings. + */ + public static Map getRealmSettings(Settings globalSettings) { + Settings realmsSettings = RealmSettings.get(globalSettings); + return realmsSettings.names().stream() + .collect(Collectors.toMap(Function.identity(), realmsSettings::getAsSettings)); + } + /** * Convert the child {@link Setting} for the provided realm into a fully scoped key for use in an error message. * @see #PREFIX diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java index fc99e52c33e..85c7aa11e21 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealm.java @@ -6,8 +6,10 @@ package org.elasticsearch.xpack.security.authc.esnative; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; @@ -35,10 +37,25 @@ public class NativeRealm extends CachingUsernamePasswordRealm { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { userStore.verifyPassword(token.principal(), token.credentials(), listener); } + public void onSecurityIndexHealthChange(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { + final boolean movedFromRedToNonRed = (previousHealth == null || previousHealth.getStatus() == ClusterHealthStatus.RED) + && currentHealth != null && currentHealth.getStatus() != ClusterHealthStatus.RED; + final boolean indexDeleted = previousHealth != null && currentHealth == null; + + if (movedFromRedToNonRed || indexDeleted) { + clearCache(); + } + } + + // method is used for testing to verify cache expiration since expireAll is final + void clearCache() { + expireAll(); + } + /** * @return The {@link Setting setting configuration} for this realm type */ diff --git 
a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 39585294d05..427866eadaa 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheResponse; import org.elasticsearch.xpack.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.security.action.user.PutUserRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.ContainerSettings; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.client.SecurityClient; @@ -512,14 +513,14 @@ public class NativeUsersStore extends AbstractComponent { * @param username username to lookup the user by * @param password the plaintext password to verify */ - void verifyPassword(String username, final SecureString password, ActionListener listener) { + void verifyPassword(String username, final SecureString password, ActionListener listener) { getUserAndPassword(username, ActionListener.wrap((userAndPassword) -> { if (userAndPassword == null || userAndPassword.passwordHash() == null) { listener.onResponse(null); } else if (hasher.verify(password, userAndPassword.passwordHash())) { - listener.onResponse(userAndPassword.user()); + listener.onResponse(AuthenticationResult.success(userAndPassword.user())); } else { - listener.onResponse(null); + listener.onResponse(AuthenticationResult.unsuccessful("Password authentication failed for " + username, null)); } }, listener::onFailure)); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 3b55ce42dbe..6127c064f89 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -17,7 +18,8 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.SecurityLifecycleService; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; @@ -31,9 +33,6 @@ import org.elasticsearch.xpack.security.user.KibanaUser; import org.elasticsearch.xpack.security.user.LogstashSystemUser; 
import org.elasticsearch.xpack.security.user.User; -import java.net.InetAddress; -import java.net.NetworkInterface; -import java.net.SocketException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -50,7 +49,6 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { public static final SecureString EMPTY_PASSWORD_TEXT = new SecureString("".toCharArray()); static final char[] EMPTY_PASSWORD_HASH = Hasher.BCRYPT.hash(EMPTY_PASSWORD_TEXT); - static final char[] OLD_DEFAULT_PASSWORD_HASH = Hasher.BCRYPT.hash(new SecureString("changeme".toCharArray())); private static final ReservedUserInfo DEFAULT_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, true, true); private static final ReservedUserInfo DISABLED_USER_INFO = new ReservedUserInfo(EMPTY_PASSWORD_HASH, false, true); @@ -58,6 +56,7 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { public static final Setting ACCEPT_DEFAULT_PASSWORD_SETTING = Setting.boolSetting( Security.setting("authc.accept_default_password"), true, Setting.Property.NodeScope, Setting.Property.Filtered, Setting.Property.Deprecated); + public static final Setting BOOTSTRAP_ELASTIC_PASSWORD = SecureSetting.secureString("bootstrap.password", null); private final NativeUsersStore nativeUsersStore; private final AnonymousUser anonymousUser; @@ -76,73 +75,38 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { - if (incomingRequest.getType() != IncomingRequest.RequestType.REST) { - doAuthenticate(token, listener, false); - } else { - InetAddress address = incomingRequest.getRemoteAddress().getAddress(); - - try { - // This checks if the address is the loopback address or if it is bound to one of this machine's - // network interfaces. This is because we want to allow requests that originate from this machine. - final boolean isLocalMachine = address.isLoopbackAddress() || NetworkInterface.getByInetAddress(address) != null; - doAuthenticate(token, listener, isLocalMachine); - } catch (SocketException e) { - listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", e, token.principal())); - } - } - } - - private void doAuthenticate(UsernamePasswordToken token, ActionListener listener, boolean acceptEmptyPassword) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { if (realmEnabled == false) { - listener.onResponse(null); + listener.onResponse(AuthenticationResult.notHandled()); } else if (isReserved(token.principal(), config.globalSettings()) == false) { - listener.onResponse(null); + listener.onResponse(AuthenticationResult.notHandled()); } else { getUserInfo(token.principal(), ActionListener.wrap((userInfo) -> { - Runnable action; + AuthenticationResult result; if (userInfo != null) { try { if (userInfo.hasEmptyPassword) { - // norelease - // Accepting the OLD_DEFAULT_PASSWORD_HASH is a transition step. We do not want to support - // this in a release. 
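The rewritten ReservedRealm.doAuthenticate (continued below) resolves to one of three outcomes: "not handled" when the realm is disabled or the user is not a reserved one, an immediate failure when the reserved user still has an empty password or the password does not match, and success otherwise. A simplified sketch of that decision order, assuming a terminate-style result means the failure is final rather than deferred to other realms; Outcome and ReservedUserInfo are local stand-ins:

final class ReservedRealmSketch {

    enum Outcome { SUCCESS, NOT_HANDLED, TERMINATE }

    record ReservedUserInfo(char[] passwordHash, boolean hasEmptyPassword, boolean enabled) { }

    static Outcome authenticate(boolean realmEnabled, boolean isReservedUser,
                                ReservedUserInfo userInfo, boolean passwordMatches) {
        if (realmEnabled == false) {
            return Outcome.NOT_HANDLED;               // realm switched off: let other realms try
        }
        if (isReservedUser == false) {
            return Outcome.NOT_HANDLED;               // not a reserved user name: let other realms try
        }
        if (userInfo == null) {
            return Outcome.TERMINATE;                 // reserved name but no stored info: hard failure
        }
        try {
            if (userInfo.hasEmptyPassword()) {
                return Outcome.TERMINATE;             // password never set: reject outright
            }
            return passwordMatches ? Outcome.SUCCESS : Outcome.TERMINATE;
        } finally {
            // Scrub the local hash copy; the real code skips the shared empty-hash constant.
            java.util.Arrays.fill(userInfo.passwordHash(), '\0');
        }
    }
}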
- if (isSetupMode(token.principal(), acceptEmptyPassword) == false) { - action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", - token.principal())); - } else if (verifyPassword(userInfo, token) - || Hasher.BCRYPT.verify(token.credentials(), OLD_DEFAULT_PASSWORD_HASH)) { - action = () -> listener.onResponse(getUser(token.principal(), userInfo)); - } else { - action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", - token.principal())); - } + result = AuthenticationResult.terminate("failed to authenticate user [" + token.principal() + "]", null); } else if (verifyPassword(userInfo, token)) { final User user = getUser(token.principal(), userInfo); - action = () -> listener.onResponse(user); + result = AuthenticationResult.success(user); } else { - action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", - token.principal())); + result = AuthenticationResult.terminate("failed to authenticate user [" + token.principal() + "]", null); } } finally { - if (userInfo.passwordHash != EMPTY_PASSWORD_HASH && userInfo.passwordHash != OLD_DEFAULT_PASSWORD_HASH) { + if (userInfo.passwordHash != EMPTY_PASSWORD_HASH) { Arrays.fill(userInfo.passwordHash, (char) 0); } } } else { - action = () -> listener.onFailure(Exceptions.authenticationError("failed to authenticate user [{}]", - token.principal())); + result = AuthenticationResult.terminate("failed to authenticate user [" + token.principal() + "]", null); } - // we want the finally block to clear out the chars before we proceed further so we execute the action here - action.run(); + // we want the finally block to clear out the chars before we proceed further so we handle the result here + listener.onResponse(result); }, listener::onFailure)); } } - private boolean isSetupMode(String userName, boolean acceptEmptyPassword) { - return ElasticUser.NAME.equals(userName) && acceptEmptyPassword; - } - private boolean verifyPassword(ReservedUserInfo userInfo, UsernamePasswordToken token) { if (Hasher.BCRYPT.verify(token.credentials(), userInfo.passwordHash)) { return true; @@ -186,11 +150,36 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { } } + public synchronized void bootstrapElasticUserCredentials(SecureString passwordHash, ActionListener listener) { + getUserInfo(ElasticUser.NAME, new ActionListener() { + @Override + public void onResponse(ReservedUserInfo reservedUserInfo) { + if (reservedUserInfo == null) { + listener.onFailure(new IllegalStateException("unexpected state: ReservedUserInfo was null")); + } else if (reservedUserInfo.hasEmptyPassword) { + ChangePasswordRequest changePasswordRequest = new ChangePasswordRequest(); + changePasswordRequest.username(ElasticUser.NAME); + changePasswordRequest.passwordHash(passwordHash.getChars()); + nativeUsersStore.changePassword(changePasswordRequest, + ActionListener.wrap(v -> listener.onResponse(true), listener::onFailure)); + + } else { + listener.onResponse(false); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + private User getUser(String username, ReservedUserInfo userInfo) { assert username != null; switch (username) { case ElasticUser.NAME: - return new ElasticUser(userInfo.enabled, userInfo.hasEmptyPassword); + return new ElasticUser(userInfo.enabled); case KibanaUser.NAME: return new KibanaUser(userInfo.enabled); case LogstashSystemUser.NAME: @@ -214,8 +203,7 @@ public class ReservedRealm extends 
CachingUsernamePasswordRealm { List users = new ArrayList<>(4); ReservedUserInfo userInfo = reservedUserInfos.get(ElasticUser.NAME); - users.add(new ElasticUser(userInfo == null || userInfo.enabled, - userInfo == null || userInfo.hasEmptyPassword)); + users.add(new ElasticUser(userInfo == null || userInfo.enabled)); userInfo = reservedUserInfos.get(KibanaUser.NAME); users.add(new KibanaUser(userInfo == null || userInfo.enabled)); @@ -277,5 +265,6 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { public static void addSettings(List> settingsList) { settingsList.add(ACCEPT_DEFAULT_PASSWORD_SETTING); + settingsList.add(BOOTSTRAP_ELASTIC_PASSWORD); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java index 66d8e575cbb..ad66a08d278 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java @@ -14,6 +14,7 @@ import org.elasticsearch.cli.Terminal; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -37,22 +38,32 @@ import java.util.function.Function; */ public class SetupPasswordTool extends MultiCommand { - private static final char[] CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + + private static final char[] CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + "~!@#$%^&*-_=+?").toCharArray(); private static final String[] USERS = new String[]{ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME}; private final Function clientFunction; + private final CheckedFunction keyStoreFunction; private CommandLineHttpClient client; SetupPasswordTool() { - this((environment) -> new CommandLineHttpClient(environment.settings(), environment)); + this((environment) -> new CommandLineHttpClient(environment.settings(), environment), + (environment) -> { + KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.load(environment.configFile()); + if (keyStoreWrapper == null) { + throw new UserException(ExitCodes.CONFIG, "Keystore does not exist"); + } + return keyStoreWrapper; + }); } - SetupPasswordTool(Function clientFunction) { + SetupPasswordTool(Function clientFunction, + CheckedFunction keyStoreFunction) { super("Sets the passwords for reserved users"); subcommands.put("auto", new AutoSetup()); subcommands.put("interactive", new InteractiveSetup()); this.clientFunction = clientFunction; + this.keyStoreFunction = keyStoreFunction; } public static void main(String[] args) throws Exception { @@ -135,7 +146,7 @@ public class SetupPasswordTool extends MultiCommand { try (SecureString password2 = new SecureString(terminal.readSecret("Reenter password for [" + user + "]: "))) { if (password1.equals(password2) == false) { password1.close(); - throw new UserException(ExitCodes.USAGE, "Passwords for user [" + user+ "] do not match"); + throw new UserException(ExitCodes.USAGE, "Passwords for user [" + user + "] do not match"); } } return password1; @@ -157,7 +168,7 @@ public class SetupPasswordTool extends 
MultiCommand { private OptionSpec noPromptOption; private String elasticUser = ElasticUser.NAME; - private SecureString elasticUserPassword = ReservedRealm.EMPTY_PASSWORD_TEXT; + private SecureString elasticUserPassword; private String url; SetupCommand(String description) { @@ -165,11 +176,17 @@ public class SetupPasswordTool extends MultiCommand { setParser(); } - void setupOptions(OptionSet options, Environment env) { + void setupOptions(OptionSet options, Environment env) throws Exception { client = clientFunction.apply(env); + KeyStoreWrapper keyStore = keyStoreFunction.apply(env); String providedUrl = urlOption.value(options); url = providedUrl == null ? "http://localhost:9200" : providedUrl; setShouldPrompt(options); + + // TODO: We currently do not support keystore passwords + keyStore.decrypt(new char[0]); + + elasticUserPassword = keyStore.getString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey()); } private void setParser() { @@ -199,6 +216,7 @@ public class SetupPasswordTool extends MultiCommand { BiConsumer callback) throws Exception { boolean isSuperUser = user.equals(elasticUser); SecureString password = passwordFn.apply(user); + try { String route = url + "/_xpack/security/user/" + user + "/_password"; String response = client.postURL("PUT", route, elasticUser, elasticUserPassword, buildPayload(password)); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileRealm.java index 8aa9b4b0a2f..c49dd3c1242 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileRealm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileRealm.java @@ -5,18 +5,18 @@ */ package org.elasticsearch.xpack.security.authc.file; -import java.util.Map; -import java.util.Set; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.user.User; +import java.util.Map; +import java.util.Set; + public class FileRealm extends CachingUsernamePasswordRealm { public static final String TYPE = "file"; @@ -38,13 +38,12 @@ public class FileRealm extends CachingUsernamePasswordRealm { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { - if (userPasswdStore.verifyPassword(token.principal(), token.credentials())) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { + final AuthenticationResult result = userPasswdStore.verifyPassword(token.principal(), token.credentials(), () -> { String[] roles = userRolesStore.roles(token.principal()); - listener.onResponse(new User(token.principal(), roles)); - } else { - listener.onResponse(null); - } + return new User(token.principal(), roles); + }); + listener.onResponse(result); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java index 1691836463f..3b5246e3aed 100644 
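FileRealm now hands the password store a supplier, so the User (and its role lookup) is only constructed once the hash check has passed, and the store can report "not handled", failure, or success instead of a bare boolean. A compact, self-contained stand-in for that contract; the Result and User types below are illustrative, not the X-Pack classes:

import java.util.Map;
import java.util.function.Supplier;

final class FileVerifySketch {

    record User(String name, String[] roles) { }

    enum Status { SUCCESS, NOT_HANDLED, UNSUCCESSFUL }

    record Result(Status status, User user, String message) { }

    // The Supplier is only invoked after a successful hash match, so the role lookup never
    // runs for unknown users or wrong passwords.
    static Result verifyPassword(Map<String, String> usersToHash, String username, String password,
                                 Supplier<User> userFactory) {
        String hash = usersToHash.get(username);
        if (hash == null) {
            return new Result(Status.NOT_HANDLED, null, null);
        }
        if (hash.equals("hash:" + password) == false) {     // placeholder for a real bcrypt check
            return new Result(Status.UNSUCCESSFUL, null, "Password authentication failed for " + username);
        }
        return new Result(Status.SUCCESS, userFactory.get(), null);
    }

    public static void main(String[] args) {
        Map<String, String> users = Map.of("jdoe", "hash:s3cret");   // stand-in for the users file
        Result result = verifyPassword(users, "jdoe", "s3cret",
                () -> new User("jdoe", new String[] { "superuser" }));
        System.out.println(result.status() + " as " + result.user().name());
    }
}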
--- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStore.java @@ -18,11 +18,13 @@ import org.elasticsearch.watcher.FileWatcher; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.XPackPlugin; import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.support.NoOpLogger; import org.elasticsearch.xpack.security.support.Validation; import org.elasticsearch.xpack.security.support.Validation.Users; +import org.elasticsearch.xpack.security.user.User; import java.io.IOException; import java.io.PrintWriter; @@ -78,16 +80,15 @@ public class FileUserPasswdStore { return users.size(); } - public boolean verifyPassword(String username, SecureString password) { + public AuthenticationResult verifyPassword(String username, SecureString password, java.util.function.Supplier user) { char[] hash = users.get(username); if (hash == null) { - return false; + return AuthenticationResult.notHandled(); } if (hasher.verify(password, hash) == false) { - logger.debug("User [{}] exists in file but authentication failed", username); - return false; + return AuthenticationResult.unsuccessful("Password authentication failed for " + username, null); } - return true; + return AuthenticationResult.success(user.get()); } public boolean userExists(String username) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolver.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolver.java index 4a0d44a452f..506454a8a22 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolver.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolver.java @@ -24,9 +24,13 @@ import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING; +import static org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactory.buildDnFromDomain; import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.OBJECT_CLASS_PRESENCE_FILTER; import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.search; import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.searchForEntry; +import static org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory.IGNORE_REFERRAL_ERRORS_SETTING; + class ActiveDirectoryGroupsResolver implements GroupsResolver { @@ -35,11 +39,10 @@ class ActiveDirectoryGroupsResolver implements GroupsResolver { private final LdapSearchScope scope; private final boolean ignoreReferralErrors; - ActiveDirectoryGroupsResolver(Settings settings, String baseDnDefault, - boolean ignoreReferralErrors) { - this.baseDn = settings.get("base_dn", baseDnDefault); - this.scope = LdapSearchScope.resolve(settings.get("scope"), LdapSearchScope.SUB_TREE); - this.ignoreReferralErrors = ignoreReferralErrors; + ActiveDirectoryGroupsResolver(Settings settings) { + this.baseDn = settings.get("group_search.base_dn", buildDnFromDomain(settings.get(AD_DOMAIN_NAME_SETTING))); + this.scope = 
LdapSearchScope.resolve(settings.get("group_search.scope"), LdapSearchScope.SUB_TREE); + this.ignoreReferralErrors = IGNORE_REFERRAL_ERRORS_SETTING.get(settings); } @Override diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java index 8483cc65f47..bd65c3b6ecc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java @@ -8,25 +8,32 @@ package org.elasticsearch.xpack.security.authc.ldap; import com.unboundid.ldap.sdk.Filter; import com.unboundid.ldap.sdk.LDAPConnection; import com.unboundid.ldap.sdk.LDAPConnectionOptions; +import com.unboundid.ldap.sdk.LDAPConnectionPool; import com.unboundid.ldap.sdk.LDAPException; +import com.unboundid.ldap.sdk.LDAPInterface; import com.unboundid.ldap.sdk.SearchResultEntry; +import com.unboundid.ldap.sdk.SimpleBindRequest; +import com.unboundid.ldap.sdk.controls.AuthorizationIdentityRequestControl; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.elasticsearch.xpack.security.authc.RealmSettings; import org.elasticsearch.xpack.security.authc.ldap.support.LdapMetaDataResolver; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver; import org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils; import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; +import org.elasticsearch.xpack.security.authc.support.CharArrays; import org.elasticsearch.xpack.ssl.SSLService; import java.util.HashSet; @@ -46,7 +53,7 @@ import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.sear * user entry in Active Directory that matches the user name). This eliminates the need for user templates, and simplifies * the configuration for windows admins that may not be familiar with LDAP concepts. 
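With the constructor change above, ActiveDirectoryGroupsResolver reads the full realm settings directly: group_search.base_dn defaults to a DN derived from domain_name, and group_search.scope defaults to a sub-tree search. A small sketch of that defaulting, with the realm settings replaced by a plain Map and the scope reduced to a string:

import java.util.Map;

final class GroupSearchDefaultsSketch {

    // Same shape as buildDnFromDomain: "ad.example.com" -> "DC=ad,DC=example,DC=com"
    static String dnFromDomain(String domain) {
        return "DC=" + domain.replace(".", ",DC=");
    }

    static String groupSearchBaseDn(Map<String, String> realmSettings) {
        String explicit = realmSettings.get("group_search.base_dn");
        return explicit != null ? explicit : dnFromDomain(realmSettings.get("domain_name"));
    }

    static String groupSearchScope(Map<String, String> realmSettings) {
        return realmSettings.getOrDefault("group_search.scope", "SUB_TREE");
    }

    public static void main(String[] args) {
        Map<String, String> settings = Map.of("domain_name", "ad.example.com");   // hypothetical realm config
        System.out.println(groupSearchBaseDn(settings));   // DC=ad,DC=example,DC=com
        System.out.println(groupSearchScope(settings));    // SUB_TREE
    }
}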
*/ -class ActiveDirectorySessionFactory extends SessionFactory { +class ActiveDirectorySessionFactory extends PoolingSessionFactory { static final String AD_DOMAIN_NAME_SETTING = "domain_name"; @@ -58,29 +65,42 @@ class ActiveDirectorySessionFactory extends SessionFactory { static final String AD_DOWN_LEVEL_USER_SEARCH_FILTER_SETTING = "user_search.down_level_filter"; static final String AD_USER_SEARCH_SCOPE_SETTING = "user_search.scope"; private static final String NETBIOS_NAME_FILTER_TEMPLATE = "(netbiosname={0})"; + private static final Setting POOL_ENABLED = Setting.boolSetting("user_search.pool.enabled", + settings -> Boolean.toString(PoolingSessionFactory.BIND_DN.exists(settings)), Setting.Property.NodeScope); final DefaultADAuthenticator defaultADAuthenticator; final DownLevelADAuthenticator downLevelADAuthenticator; final UpnADAuthenticator upnADAuthenticator; - ActiveDirectorySessionFactory(RealmConfig config, SSLService sslService) { - super(config, sslService); + ActiveDirectorySessionFactory(RealmConfig config, SSLService sslService) throws LDAPException { + super(config, sslService, new ActiveDirectoryGroupsResolver(config.settings()), POOL_ENABLED, () -> { + if (BIND_DN.exists(config.settings())) { + return new SimpleBindRequest(getBindDN(config.settings()), BIND_PASSWORD.get(config.settings())); + } else { + return new SimpleBindRequest(); + } + }, () -> { + if (BIND_DN.exists(config.settings())) { + final String healthCheckDn = BIND_DN.get(config.settings()); + if (healthCheckDn.isEmpty() && healthCheckDn.indexOf('=') > 0) { + return healthCheckDn; + } + } + return config.settings().get(AD_USER_SEARCH_BASEDN_SETTING, config.settings().get(AD_DOMAIN_NAME_SETTING)); + }); Settings settings = config.settings(); String domainName = settings.get(AD_DOMAIN_NAME_SETTING); if (domainName == null) { - throw new IllegalArgumentException("missing [" + AD_DOMAIN_NAME_SETTING + - "] setting for active directory"); + throw new IllegalArgumentException("missing [" + AD_DOMAIN_NAME_SETTING + "] setting for active directory"); } String domainDN = buildDnFromDomain(domainName); - GroupsResolver groupResolver = new ActiveDirectoryGroupsResolver(settings.getAsSettings("group_search"), domainDN, - ignoreReferralErrors); - LdapMetaDataResolver metaDataResolver = new LdapMetaDataResolver(config.settings(), ignoreReferralErrors); - defaultADAuthenticator = new DefaultADAuthenticator(config, timeout, ignoreReferralErrors, logger, groupResolver, + defaultADAuthenticator = new DefaultADAuthenticator(config, timeout, ignoreReferralErrors, logger, groupResolver, metaDataResolver, domainDN); downLevelADAuthenticator = new DownLevelADAuthenticator(config, timeout, ignoreReferralErrors, logger, groupResolver, metaDataResolver, domainDN, sslService); upnADAuthenticator = new UpnADAuthenticator(config, timeout, ignoreReferralErrors, logger, groupResolver, metaDataResolver, domainDN); + } @Override @@ -88,30 +108,78 @@ class ActiveDirectorySessionFactory extends SessionFactory { return new String[] {"ldap://" + settings.get(AD_DOMAIN_NAME_SETTING) + ":389"}; } - /** - * This is an active directory bind that looks up the user DN after binding with a windows principal. 
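Two details of the new pooled Active Directory factory above are worth isolating: user_search.pool.enabled now defaults to "true only if bind_dn is configured", and the pool's bind request is either that DN with bind_password or an anonymous simple bind. A sketch under those assumptions, with the realm settings as a plain Map and SimpleBind standing in for the UnboundID SimpleBindRequest:

import java.util.Map;
import java.util.Optional;

final class PoolBindSelectionSketch {

    record SimpleBind(String dn, String password) {
        static SimpleBind anonymous() { return new SimpleBind(null, null); }
    }

    // user_search.pool.enabled falls back to "is a bind_dn configured?" when not set explicitly.
    static boolean poolEnabled(Map<String, String> realmSettings) {
        return Boolean.parseBoolean(realmSettings.getOrDefault(
                "user_search.pool.enabled",
                Boolean.toString(realmSettings.containsKey("bind_dn"))));
    }

    static SimpleBind poolBindRequest(Map<String, String> realmSettings) {
        return Optional.ofNullable(realmSettings.get("bind_dn"))
                .map(dn -> new SimpleBind(dn, realmSettings.getOrDefault("bind_password", "")))
                .orElseGet(SimpleBind::anonymous);   // no bind_dn: the pool binds anonymously
    }

    public static void main(String[] args) {
        Map<String, String> settings = Map.of(
                "bind_dn", "es_svc@ad.example.com",    // hypothetical realm configuration
                "bind_password", "secret");
        System.out.println(poolEnabled(settings));      // true, because bind_dn is present
        System.out.println(poolBindRequest(settings));
    }
}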
- * - * @param username name of the windows user without the domain - */ @Override - public void session(String username, SecureString password, ActionListener listener) { + void getSessionWithPool(LDAPConnectionPool connectionPool, String user, SecureString password, ActionListener listener) { + getADAuthenticator(user).authenticate(connectionPool, user, password, listener); + } + + @Override + void getSessionWithoutPool(String username, SecureString password, ActionListener listener) { // the runnable action here allows us make the control/flow logic simpler to understand. If we got a connection then lets // authenticate. If there was a failure pass it back using the listener Runnable runnable; try { final LDAPConnection connection = LdapUtils.privilegedConnect(serverSet::getConnection); runnable = () -> getADAuthenticator(username).authenticate(connection, username, password, - ActionListener.wrap(listener::onResponse, - (e) -> { - IOUtils.closeWhileHandlingException(connection); - listener.onFailure(e); - })); + ActionListener.wrap(listener::onResponse, + (e) -> { + IOUtils.closeWhileHandlingException(connection); + listener.onFailure(e); + })); } catch (LDAPException e) { runnable = () -> listener.onFailure(e); } runnable.run(); } + @Override + void getUnauthenticatedSessionWithPool(LDAPConnectionPool connectionPool, String user, ActionListener listener) { + getADAuthenticator(user).searchForDN(connectionPool, user, null, Math.toIntExact(timeout.seconds()), ActionListener.wrap(entry -> { + if (entry == null) { + listener.onResponse(null); + } else { + final String dn = entry.getDN(); + listener.onResponse(new LdapSession(logger, config, connectionPool, dn, groupResolver, metaDataResolver, timeout, null)); + } + }, listener::onFailure)); + } + + @Override + void getUnauthenticatedSessionWithoutPool(String user, ActionListener listener) { + if (BIND_DN.exists(config.settings())) { + LDAPConnection connection = null; + boolean startedSearching = false; + try { + connection = LdapUtils.privilegedConnect(serverSet::getConnection); + connection.bind(new SimpleBindRequest(getBindDN(config.settings()), BIND_PASSWORD.get(config.settings()))); + final LDAPConnection finalConnection = connection; + getADAuthenticator(user).searchForDN(finalConnection, user, null, Math.toIntExact(timeout.getSeconds()), + ActionListener.wrap(entry -> { + if (entry == null) { + IOUtils.closeWhileHandlingException(finalConnection); + listener.onResponse(null); + } else { + final String dn = entry.getDN(); + listener.onResponse(new LdapSession(logger, config, finalConnection, dn, groupResolver, metaDataResolver, + timeout, null)); + } + }, e -> { + IOUtils.closeWhileHandlingException(finalConnection); + listener.onFailure(e); + })); + startedSearching = true; + } catch (LDAPException e) { + listener.onFailure(e); + } finally { + if (connection != null && startedSearching == false) { + IOUtils.closeWhileHandlingException(connection); + } + } + } else { + listener.onResponse(null); + } + } + /** * @param domain active directory domain name * @return LDAP DN, distinguished name, of the root of the domain @@ -120,6 +188,14 @@ class ActiveDirectorySessionFactory extends SessionFactory { return "DC=" + domain.replace(".", ",DC="); } + static String getBindDN(Settings settings) { + String bindDN = BIND_DN.get(settings); + if (bindDN.isEmpty() == false && bindDN.indexOf('\\') < 0 && bindDN.indexOf('@') < 0 && bindDN.indexOf('=') < 0) { + bindDN = bindDN + "@" + settings.get(AD_DOMAIN_NAME_SETTING); + } + return bindDN; + } + 
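getBindDN above lets an Active Directory realm configure the bind user as a bare account name: when bind_dn contains no backslash, '@' or '=', the realm's domain_name is appended to form a user principal name. The same normalization, pulled out with a couple of worked examples (the domain and DNs are placeholders):

final class BindDnSketch {

    // Same checks as ActiveDirectorySessionFactory.getBindDN above.
    static String normalizeBindDn(String bindDn, String domainName) {
        if (bindDn.isEmpty() == false
                && bindDn.indexOf('\\') < 0
                && bindDn.indexOf('@') < 0
                && bindDn.indexOf('=') < 0) {
            return bindDn + "@" + domainName;    // bare account name becomes a UPN
        }
        return bindDn;                           // already a UPN, down-level name, or full DN
    }

    public static void main(String[] args) {
        String domain = "ad.example.com";
        System.out.println(normalizeBindDn("es_service", domain));                        // es_service@ad.example.com
        System.out.println(normalizeBindDn("CN=svc,DC=ad,DC=example,DC=com", domain));    // unchanged
    }
}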
public static Set> getSettings() { Set> settings = new HashSet<>(); settings.addAll(SessionFactory.getSettings()); @@ -131,6 +207,7 @@ class ActiveDirectorySessionFactory extends SessionFactory { settings.add(Setting.simpleString(AD_UPN_USER_SEARCH_FILTER_SETTING, Setting.Property.NodeScope)); settings.add(Setting.simpleString(AD_DOWN_LEVEL_USER_SEARCH_FILTER_SETTING, Setting.Property.NodeScope)); settings.add(Setting.simpleString(AD_USER_SEARCH_SCOPE_SETTING, Setting.Property.NodeScope)); + settings.addAll(PoolingSessionFactory.getSettings()); return settings; } @@ -154,6 +231,8 @@ class ActiveDirectorySessionFactory extends SessionFactory { final String userSearchDN; final LdapSearchScope userSearchScope; final String userSearchFilter; + final String bindDN; + final String bindPassword; // TODO this needs to be a setting in the secure settings store! ADAuthenticator(RealmConfig realm, TimeValue timeout, boolean ignoreReferralErrors, Logger logger, GroupsResolver groupsResolver, LdapMetaDataResolver metaDataResolver, String domainDN, @@ -165,6 +244,8 @@ class ActiveDirectorySessionFactory extends SessionFactory { this.groupsResolver = groupsResolver; this.metaDataResolver = metaDataResolver; final Settings settings = realm.settings(); + this.bindDN = getBindDN(settings); + this.bindPassword = BIND_PASSWORD.get(settings); userSearchDN = settings.get(AD_USER_SEARCH_BASEDN_SETTING, domainDN); userSearchScope = LdapSearchScope.resolve(settings.get(AD_USER_SEARCH_SCOPE_SETTING), LdapSearchScope.SUB_TREE); userSearchFilter = settings.get(userSearchFilterSetting, defaultUserSearchFilter); @@ -174,7 +255,11 @@ class ActiveDirectorySessionFactory extends SessionFactory { ActionListener listener) { boolean success = false; try { - connection.bind(bindUsername(username), new String(password.getChars())); + connection.bind(new SimpleBindRequest(bindUsername(username), CharArrays.toUtf8Bytes(password.getChars()), + new AuthorizationIdentityRequestControl())); + if (bindDN.isEmpty() == false) { + connection.bind(new SimpleBindRequest(bindDN, bindPassword)); + } searchForDN(connection, username, password, Math.toIntExact(timeout.seconds()), ActionListener.wrap((entry) -> { if (entry == null) { IOUtils.close(connection); @@ -200,6 +285,28 @@ class ActiveDirectorySessionFactory extends SessionFactory { } } + final void authenticate(LDAPConnectionPool pool, String username, SecureString password, + ActionListener listener) { + try { + LdapUtils.privilegedConnect(() -> { + SimpleBindRequest request = new SimpleBindRequest(bindUsername(username), CharArrays.toUtf8Bytes(password.getChars())); + return pool.bindAndRevertAuthentication(request); + }); + searchForDN(pool, username, password, Math.toIntExact(timeout.seconds()), ActionListener.wrap((entry) -> { + if (entry == null) { + // we did not find the user, cannot authenticate in this realm + listener.onFailure(new ElasticsearchSecurityException("search for user [" + username + + "] by principle name yielded no results")); + } else { + final String dn = entry.getDN(); + listener.onResponse(new LdapSession(logger, realm, pool, dn, groupsResolver, metaDataResolver, timeout, null)); + } + }, listener::onFailure)); + } catch (LDAPException e) { + listener.onFailure(e); + } + } + String bindUsername(String username) { return username; } @@ -209,7 +316,7 @@ class ActiveDirectorySessionFactory extends SessionFactory { return userSearchFilter; } - abstract void searchForDN(LDAPConnection connection, String username, SecureString password, int timeLimitSeconds, + 
abstract void searchForDN(LDAPInterface connection, String username, SecureString password, int timeLimitSeconds, ActionListener listener); } @@ -233,7 +340,7 @@ class ActiveDirectorySessionFactory extends SessionFactory { } @Override - void searchForDN(LDAPConnection connection, String username, SecureString password, + void searchForDN(LDAPInterface connection, String username, SecureString password, int timeLimitSeconds, ActionListener listener) { try { searchForEntry(connection, userSearchDN, userSearchScope.scope(), @@ -276,7 +383,7 @@ class ActiveDirectorySessionFactory extends SessionFactory { } @Override - void searchForDN(LDAPConnection connection, String username, SecureString password, int timeLimitSeconds, + void searchForDN(LDAPInterface connection, String username, SecureString password, int timeLimitSeconds, ActionListener listener) { String[] parts = username.split("\\\\"); assert parts.length == 2; @@ -285,7 +392,6 @@ class ActiveDirectorySessionFactory extends SessionFactory { netBiosDomainNameToDn(connection, netBiosDomainName, username, password, timeLimitSeconds, ActionListener.wrap((domainDN) -> { if (domainDN == null) { - IOUtils.close(connection); listener.onResponse(null); } else { try { @@ -294,75 +400,75 @@ class ActiveDirectorySessionFactory extends SessionFactory { accountName), timeLimitSeconds, ignoreReferralErrors, listener, attributesToSearchFor(groupsResolver.attributes())); } catch (LDAPException e) { - IOUtils.closeWhileHandlingException(connection); listener.onFailure(e); } } - }, (e) -> { - IOUtils.closeWhileHandlingException(connection); - listener.onFailure(e); - })); + }, listener::onFailure)); } - void netBiosDomainNameToDn(LDAPConnection connection, String netBiosDomainName, String username, SecureString password, + void netBiosDomainNameToDn(LDAPInterface ldapInterface, String netBiosDomainName, String username, SecureString password, int timeLimitSeconds, ActionListener listener) { final String cachedName = domainNameCache.get(netBiosDomainName); - if (cachedName != null) { - listener.onResponse(cachedName); - } else if (usingGlobalCatalog(settings, connection)) { - // the global catalog does not replicate the necessary information to map a netbios - // dns name to a DN so we need to instead connect to the normal ports. This code - // uses the standard ports to avoid adding even more settings and is probably ok as - // most AD users do not use non-standard ports - final LDAPConnectionOptions options = connectionOptions(config, sslService, logger); - boolean startedSearching = false; - LDAPConnection searchConnection = null; - try { - Filter filter = createFilter(NETBIOS_NAME_FILTER_TEMPLATE, netBiosDomainName); - if (connection.getSSLSession() != null) { + try { + if (cachedName != null) { + listener.onResponse(cachedName); + } else if (usingGlobalCatalog(ldapInterface)) { + // the global catalog does not replicate the necessary information to map a netbios + // dns name to a DN so we need to instead connect to the normal ports. 
This code + // uses the standard ports to avoid adding even more settings and is probably ok as + // most AD users do not use non-standard ports + final LDAPConnectionOptions options = connectionOptions(config, sslService, logger); + boolean startedSearching = false; + LDAPConnection searchConnection = null; + LDAPConnection ldapConnection = null; + try { + Filter filter = createFilter(NETBIOS_NAME_FILTER_TEMPLATE, netBiosDomainName); + if (ldapInterface instanceof LDAPConnection) { + ldapConnection = (LDAPConnection) ldapInterface; + } else { + ldapConnection = LdapUtils.privilegedConnect(((LDAPConnectionPool) ldapInterface)::getConnection); + } + final LDAPConnection finalLdapConnection = ldapConnection; searchConnection = LdapUtils.privilegedConnect( - () -> new LDAPConnection(connection.getSocketFactory(), options, - connection.getConnectedAddress(), 636)); - } else { - searchConnection = LdapUtils.privilegedConnect(() -> - new LDAPConnection(options, connection.getConnectedAddress(), 389)); + () -> new LDAPConnection(finalLdapConnection.getSocketFactory(), options, + finalLdapConnection.getConnectedAddress(), + finalLdapConnection.getSSLSession() != null ? 636 : 389)); + + final SimpleBindRequest bindRequest = + bindDN.isEmpty() ? new SimpleBindRequest(username, CharArrays.toUtf8Bytes(password.getChars())) : + new SimpleBindRequest(bindDN, bindPassword); + searchConnection.bind(bindRequest); + final LDAPConnection finalConnection = searchConnection; + search(finalConnection, domainDN, LdapSearchScope.SUB_TREE.scope(), filter, + timeLimitSeconds, ignoreReferralErrors, ActionListener.wrap( + (results) -> { + IOUtils.close(finalConnection); + handleSearchResults(results, netBiosDomainName, domainNameCache, listener); + }, (e) -> { + IOUtils.closeWhileHandlingException(finalConnection); + listener.onFailure(e); + }), + "ncname"); + startedSearching = true; + } finally { + if (startedSearching == false) { + IOUtils.closeWhileHandlingException(searchConnection); + } + if (ldapInterface instanceof LDAPConnectionPool && ldapConnection != null) { + ((LDAPConnectionPool) ldapInterface).releaseConnection(ldapConnection); + } } - searchConnection.bind(username, new String(password.getChars())); - final LDAPConnection finalConnection = searchConnection; - search(finalConnection, domainDN, LdapSearchScope.SUB_TREE.scope(), filter, - timeLimitSeconds, ignoreReferralErrors, ActionListener.wrap( - (results) -> { - IOUtils.close(finalConnection); - handleSearchResults(results, netBiosDomainName, - domainNameCache, listener); - }, (e) -> { - IOUtils.closeWhileHandlingException(connection); - listener.onFailure(e); - }), - "ncname"); - startedSearching = true; - } catch (LDAPException e) { - listener.onFailure(e); - } finally { - if (startedSearching == false) { - IOUtils.closeWhileHandlingException(searchConnection); - } - } - } else { - try { + } else { Filter filter = createFilter(NETBIOS_NAME_FILTER_TEMPLATE, netBiosDomainName); - search(connection, domainDN, LdapSearchScope.SUB_TREE.scope(), filter, + search(ldapInterface, domainDN, LdapSearchScope.SUB_TREE.scope(), filter, timeLimitSeconds, ignoreReferralErrors, ActionListener.wrap( (results) -> handleSearchResults(results, netBiosDomainName, domainNameCache, listener), - (e) -> { - IOUtils.closeWhileHandlingException(connection); - listener.onFailure(e); - }), + listener::onFailure), "ncname"); - } catch (LDAPException e) { - listener.onFailure(e); } + } catch (LDAPException e) { + listener.onFailure(e); } } @@ -385,35 +491,55 @@ class 
ActiveDirectorySessionFactory extends SessionFactory { } } - static boolean usingGlobalCatalog(Settings settings, LDAPConnection ldapConnection) { - Boolean usingGlobalCatalog = settings.getAsBoolean("global_catalog", null); - if (usingGlobalCatalog != null) { - return usingGlobalCatalog; + static boolean usingGlobalCatalog(LDAPInterface ldap) throws LDAPException { + if (ldap instanceof LDAPConnection) { + return usingGlobalCatalog((LDAPConnection) ldap); + } else { + LDAPConnectionPool pool = (LDAPConnectionPool) ldap; + LDAPConnection connection = null; + try { + connection = LdapUtils.privilegedConnect(pool::getConnection); + return usingGlobalCatalog(connection); + } finally { + if (connection != null) { + pool.releaseConnection(connection); + } + } } + } + + private static boolean usingGlobalCatalog(LDAPConnection ldapConnection) { return ldapConnection.getConnectedPort() == 3268 || ldapConnection.getConnectedPort() == 3269; } } + /** + * Authenticates user principal names provided by the user (eq user@domain). Note this authenticator does not currently support + * UPN suffixes that are different than the actual domain name. + */ static class UpnADAuthenticator extends ADAuthenticator { - static final String UPN_USER_FILTER = "(&(objectClass=user)(|(sAMAccountName={0})(userPrincipalName={1})))"; + static final String UPN_USER_FILTER = "(&(objectClass=user)(userPrincipalName={1}))"; UpnADAuthenticator(RealmConfig config, TimeValue timeout, boolean ignoreReferralErrors, Logger logger, GroupsResolver groupsResolver, LdapMetaDataResolver metaDataResolver, String domainDN) { super(config, timeout, ignoreReferralErrors, logger, groupsResolver, metaDataResolver, domainDN, AD_UPN_USER_SEARCH_FILTER_SETTING, UPN_USER_FILTER); + if (userSearchFilter.contains("{0}")) { + new DeprecationLogger(logger).deprecated("The use of the account name variable {0} in the setting [" + + RealmSettings.getFullSettingKey(config, AD_UPN_USER_SEARCH_FILTER_SETTING) + + "] has been deprecated and will be removed in a future version!"); + } } - void searchForDN(LDAPConnection connection, String username, SecureString password, int timeLimitSeconds, + void searchForDN(LDAPInterface connection, String username, SecureString password, int timeLimitSeconds, ActionListener listener) { String[] parts = username.split("@"); - assert parts.length == 2; + assert parts.length == 2 : "there should have only been two values for " + username + " after splitting on '@'"; final String accountName = parts[0]; - final String domainName = parts[1]; - final String domainDN = buildDnFromDomain(domainName); try { - Filter filter = createFilter(UPN_USER_FILTER, accountName, username); - searchForEntry(connection, domainDN, LdapSearchScope.SUB_TREE.scope(), filter, + Filter filter = createFilter(userSearchFilter, accountName, username); + searchForEntry(connection, userSearchDN, LdapSearchScope.SUB_TREE.scope(), filter, timeLimitSeconds, ignoreReferralErrors, listener, attributesToSearchFor(groupsResolver.attributes())); } catch (LDAPException e) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java index fe52fece154..38cc1d00277 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealm.java @@ -5,13 +5,6 @@ */ package org.elasticsearch.xpack.security.authc.ldap; -import java.util.HashSet; -import 
java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; -import java.util.function.Supplier; - import com.unboundid.ldap.sdk.LDAPException; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; @@ -28,7 +21,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.RealmSettings; import org.elasticsearch.xpack.security.authc.ldap.support.LdapLoadBalancing; @@ -44,6 +37,13 @@ import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingSt import org.elasticsearch.xpack.security.user.User; import org.elasticsearch.xpack.ssl.SSLService; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.function.Supplier; + /** * Authenticates username/password tokens against ldap, locates groups and maps them to roles. @@ -142,7 +142,7 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { * This user will then be passed to the listener */ @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { // we submit to the threadpool because authentication using LDAP will execute blocking I/O for a bind request and we don't want // network threads stuck waiting for a socket to connect. After the bind, then all interaction with LDAP should be async final CancellableLdapRunnable cancellableLdapRunnable = new CancellableLdapRunnable(listener, @@ -153,17 +153,19 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { } @Override - protected void doLookupUser(String username, ActionListener listener) { + protected void doLookupUser(String username, ActionListener userActionListener) { if (sessionFactory.supportsUnauthenticatedSession()) { // we submit to the threadpool because authentication using LDAP will execute blocking I/O for a bind request and we don't want // network threads stuck waiting for a socket to connect. 
After the bind, then all interaction with LDAP should be async - final CancellableLdapRunnable cancellableLdapRunnable = new CancellableLdapRunnable(listener, + final ActionListener sessionListener = ActionListener.wrap(AuthenticationResult::getUser, + userActionListener::onFailure); + final CancellableLdapRunnable cancellableLdapRunnable = new CancellableLdapRunnable(userActionListener, () -> sessionFactory.unauthenticatedSession(username, - contextPreservingListener(new LdapSessionActionListener("lookup", username, listener))), logger); + contextPreservingListener(new LdapSessionActionListener("lookup", username, sessionListener))), logger); threadPool.generic().execute(cancellableLdapRunnable); threadPool.schedule(executionTimeout, Names.SAME, cancellableLdapRunnable::maybeTimeout); } else { - listener.onResponse(null); + userActionListener.onResponse(null); } } @@ -188,7 +190,8 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { return usage; } - private static void buildUser(LdapSession session, String username, ActionListener listener, UserRoleMapper roleMapper) { + private static void buildUser(LdapSession session, String username, ActionListener listener, + UserRoleMapper roleMapper) { if (session == null) { listener.onResponse(null); } else { @@ -210,8 +213,8 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { roles -> { IOUtils.close(session); String[] rolesArray = roles.toArray(new String[roles.size()]); - listener.onResponse( - new User(username, rolesArray, null, null, metadata, true) + listener.onResponse(AuthenticationResult.success( + new User(username, rolesArray, null, null, metadata, true)) ); }, onFailure )); @@ -236,21 +239,21 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { private final AtomicReference ldapSessionAtomicReference = new AtomicReference<>(); private String action; private final String username; - private final ActionListener userActionListener; + private final ActionListener resultListener; - LdapSessionActionListener(String action, String username, ActionListener userActionListener) { + LdapSessionActionListener(String action, String username, ActionListener resultListener) { this.action = action; this.username = username; - this.userActionListener = userActionListener; + this.resultListener = resultListener; } @Override public void onResponse(LdapSession session) { if (session == null) { - userActionListener.onResponse(null); + resultListener.onResponse(null); } else { ldapSessionAtomicReference.set(session); - buildUser(session, username, userActionListener, roleMapper); + buildUser(session, username, resultListener, roleMapper); } } @@ -262,8 +265,7 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { if (logger.isDebugEnabled()) { logger.debug(new ParameterizedMessage("Exception occurred during {} for {}", action, LdapRealm.this), e); } - setFailedAuthenticationDetails(action + " failed", e); - userActionListener.onResponse(null); + resultListener.onResponse(AuthenticationResult.unsuccessful(action + " failed", e)); } } @@ -276,11 +278,11 @@ public final class LdapRealm extends CachingUsernamePasswordRealm { static class CancellableLdapRunnable extends AbstractRunnable { private final Runnable in; - private final ActionListener listener; + private final ActionListener listener; private final Logger logger; private final AtomicReference state = new AtomicReference<>(LdapRunnableState.AWAITING_EXECUTION); - CancellableLdapRunnable(ActionListener listener, Runnable in, 
Logger logger) { + CancellableLdapRunnable(ActionListener listener, Runnable in, Logger logger) { this.listener = listener; this.in = in; this.logger = logger; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java index 0da1bcca05c..553e13e67e9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactory.java @@ -5,25 +5,20 @@ */ package org.elasticsearch.xpack.security.authc.ldap; -import com.unboundid.ldap.sdk.GetEntryLDAPConnectionPoolHealthCheck; +import com.unboundid.ldap.sdk.Filter; import com.unboundid.ldap.sdk.LDAPConnection; import com.unboundid.ldap.sdk.LDAPConnectionPool; -import com.unboundid.ldap.sdk.LDAPConnectionPoolHealthCheck; import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.LDAPInterface; import com.unboundid.ldap.sdk.SearchResultEntry; -import com.unboundid.ldap.sdk.ServerSet; import com.unboundid.ldap.sdk.SimpleBindRequest; -import org.apache.logging.log4j.Logger; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.RealmSettings; -import org.elasticsearch.xpack.security.authc.ldap.support.LdapMetaDataResolver; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession.GroupsResolver; @@ -35,59 +30,41 @@ import org.elasticsearch.xpack.ssl.SSLService; import java.util.Arrays; import java.util.HashSet; -import java.util.Optional; import java.util.Set; import java.util.function.Function; -import static com.unboundid.ldap.sdk.Filter.createEqualityFilter; -import static com.unboundid.ldap.sdk.Filter.encodeValue; import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.attributesToSearchFor; +import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.createFilter; import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.searchForEntry; -class LdapUserSearchSessionFactory extends SessionFactory { +class LdapUserSearchSessionFactory extends PoolingSessionFactory { - static final int DEFAULT_CONNECTION_POOL_SIZE = 20; - static final int DEFAULT_CONNECTION_POOL_INITIAL_SIZE = 0; - static final String DEFAULT_USERNAME_ATTRIBUTE = "uid"; - static final TimeValue DEFAULT_HEALTH_CHECK_INTERVAL = TimeValue.timeValueSeconds(60L); + private static final String DEFAULT_USERNAME_ATTRIBUTE = "uid"; static final String SEARCH_PREFIX = "user_search."; + static final Setting SEARCH_ATTRIBUTE = new Setting<>("user_search.attribute", DEFAULT_USERNAME_ATTRIBUTE, + Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated); private static final Setting SEARCH_BASE_DN = Setting.simpleString("user_search.base_dn", Setting.Property.NodeScope); - private static final Setting SEARCH_ATTRIBUTE = new Setting<>("user_search.attribute", DEFAULT_USERNAME_ATTRIBUTE, - Function.identity(), 
Setting.Property.NodeScope); + private static final Setting SEARCH_FILTER = Setting.simpleString("user_search.filter", Setting.Property.NodeScope); private static final Setting SEARCH_SCOPE = new Setting<>("user_search.scope", (String) null, s -> LdapSearchScope.resolve(s, LdapSearchScope.SUB_TREE), Setting.Property.NodeScope); - - private static final Setting POOL_ENABLED = Setting.boolSetting("user_search.pool.enabled", - true, Setting.Property.NodeScope); - private static final Setting POOL_INITIAL_SIZE = Setting.intSetting("user_search.pool.initial_size", - DEFAULT_CONNECTION_POOL_INITIAL_SIZE, 0, Setting.Property.NodeScope); - private static final Setting POOL_SIZE = Setting.intSetting("user_search.pool.size", - DEFAULT_CONNECTION_POOL_SIZE, 1, Setting.Property.NodeScope); - private static final Setting HEALTH_CHECK_INTERVAL = Setting.timeSetting("user_search.pool.health_check.interval", - DEFAULT_HEALTH_CHECK_INTERVAL, Setting.Property.NodeScope); - private static final Setting HEALTH_CHECK_ENABLED = Setting.boolSetting("user_search.pool.health_check.enabled", - true, Setting.Property.NodeScope); - private static final Setting> HEALTH_CHECK_DN = new Setting<>("user_search.pool.health_check.dn", (String) null, - Optional::ofNullable, Setting.Property.NodeScope); - - private static final Setting BIND_DN = Setting.simpleString("bind_dn", - Setting.Property.NodeScope, Setting.Property.Filtered); - private static final Setting BIND_PASSWORD = Setting.simpleString("bind_password", - Setting.Property.NodeScope, Setting.Property.Filtered); + private static final Setting POOL_ENABLED = Setting.boolSetting("user_search.pool.enabled", true, Setting.Property.NodeScope); private final String userSearchBaseDn; private final LdapSearchScope scope; - private final String userAttribute; - private final GroupsResolver groupResolver; - private final boolean useConnectionPool; - - private final LDAPConnectionPool connectionPool; - private final LdapMetaDataResolver metaDataResolver; + private final String searchFilter; LdapUserSearchSessionFactory(RealmConfig config, SSLService sslService) throws LDAPException { - super(config, sslService); + super(config, sslService, groupResolver(config.settings()), POOL_ENABLED, + () -> LdapUserSearchSessionFactory.bindRequest(config.settings()), + () -> { + if (BIND_DN.exists(config.settings())) { + return BIND_DN.get(config.settings()); + } else { + return SEARCH_BASE_DN.get(config.settings()); + } + }); Settings settings = config.settings(); if (SEARCH_BASE_DN.exists(settings)) { userSearchBaseDn = SEARCH_BASE_DN.get(settings); @@ -95,56 +72,9 @@ class LdapUserSearchSessionFactory extends SessionFactory { throw new IllegalArgumentException("[" + RealmSettings.getFullSettingKey(config, SEARCH_BASE_DN) + "] must be specified"); } scope = SEARCH_SCOPE.get(settings); - userAttribute = SEARCH_ATTRIBUTE.get(settings); - groupResolver = groupResolver(settings); - metaDataResolver = new LdapMetaDataResolver(config.settings(), ignoreReferralErrors); - useConnectionPool = POOL_ENABLED.get(settings); - if (useConnectionPool) { - connectionPool = createConnectionPool(config, serverSet, timeout, logger); - } else { - connectionPool = null; - } - logger.info("Realm [{}] is in user-search mode - base_dn=[{}], attribute=[{}]", - config.name(), userSearchBaseDn, userAttribute); - } - - static LDAPConnectionPool createConnectionPool(RealmConfig config, ServerSet serverSet, TimeValue timeout, Logger logger) - throws LDAPException { - Settings settings = config.settings(); - 
SimpleBindRequest bindRequest = bindRequest(settings); - final int initialSize = POOL_INITIAL_SIZE.get(settings); - final int size = POOL_SIZE.get(settings); - LDAPConnectionPool pool = null; - boolean success = false; - try { - pool = LdapUtils.privilegedConnect(() -> new LDAPConnectionPool(serverSet, bindRequest, initialSize, size)); - pool.setRetryFailedOperationsDueToInvalidConnections(true); - if (HEALTH_CHECK_ENABLED.get(settings)) { - String entryDn = HEALTH_CHECK_DN.get(settings).orElseGet(() -> bindRequest == null ? null : bindRequest.getBindDN()); - final long healthCheckInterval = HEALTH_CHECK_INTERVAL.get(settings).millis(); - if (entryDn != null) { - // Checks the status of the LDAP connection at a specified interval in the background. We do not check on - // on create as the LDAP server may require authentication to get an entry and a bind request has not been executed - // yet so we could end up never getting a connection. We do not check on checkout as we always set retry operations - // and the pool will handle a bad connection without the added latency on every operation - LDAPConnectionPoolHealthCheck healthCheck = new GetEntryLDAPConnectionPoolHealthCheck(entryDn, timeout.millis(), - false, false, false, true, false); - pool.setHealthCheck(healthCheck); - pool.setHealthCheckIntervalMillis(healthCheckInterval); - } else { - logger.warn("[" + RealmSettings.getFullSettingKey(config, BIND_DN) + "] and [" + - RealmSettings.getFullSettingKey(config, HEALTH_CHECK_DN) + "] have not been specified so no " + - "ldap query will be run as a health check"); - } - } - - success = true; - return pool; - } finally { - if (success == false && pool != null) { - pool.close(); - } - } + searchFilter = getSearchFilter(config); + logger.info("Realm [{}] is in user-search mode - base_dn=[{}], search filter=[{}]", + config.name(), userSearchBaseDn, searchFilter); } static SimpleBindRequest bindRequest(Settings settings) { @@ -155,23 +85,15 @@ class LdapUserSearchSessionFactory extends SessionFactory { } } - public static boolean hasUserSearchSettings(RealmConfig config) { + static boolean hasUserSearchSettings(RealmConfig config) { return config.settings().getByPrefix("user_search.").isEmpty() == false; } - @Override - public void session(String user, SecureString password, ActionListener listener) { - if (useConnectionPool) { - getSessionWithPool(user, password, listener); - } else { - getSessionWithoutPool(user, password, listener); - } - } - /** * Sets up a LDAPSession using the connection pool that potentially holds existing connections to the server */ - private void getSessionWithPool(String user, SecureString password, ActionListener listener) { + @Override + void getSessionWithPool(LDAPConnectionPool connectionPool, String user, SecureString password, ActionListener listener) { findUser(user, connectionPool, ActionListener.wrap((entry) -> { if (entry == null) { listener.onResponse(null); @@ -203,7 +125,8 @@ class LdapUserSearchSessionFactory extends SessionFactory { *
*     <li>Creates a new LDAPSession with the bound connection</li>
  • * */ - private void getSessionWithoutPool(String user, SecureString password, ActionListener listener) { + @Override + void getSessionWithoutPool(String user, SecureString password, ActionListener listener) { boolean success = false; LDAPConnection connection = null; try { @@ -260,33 +183,42 @@ class LdapUserSearchSessionFactory extends SessionFactory { } @Override - public void unauthenticatedSession(String user, ActionListener listener) { + void getUnauthenticatedSessionWithPool(LDAPConnectionPool connectionPool, String user, ActionListener listener) { + findUser(user, connectionPool, ActionListener.wrap((entry) -> { + if (entry == null) { + listener.onResponse(null); + } else { + final String dn = entry.getDN(); + LdapSession session = new LdapSession(logger, config, connectionPool, dn, groupResolver, metaDataResolver, timeout, + entry.getAttributes()); + listener.onResponse(session); + } + }, listener::onFailure)); + } + + @Override + void getUnauthenticatedSessionWithoutPool(String user, ActionListener listener) { LDAPConnection connection = null; boolean success = false; try { - final LDAPInterface ldapInterface; - if (useConnectionPool) { - ldapInterface = connectionPool; - } else { - connection = LdapUtils.privilegedConnect(serverSet::getConnection); - connection.bind(bindRequest(config.settings())); - ldapInterface = connection; - } + connection = LdapUtils.privilegedConnect(serverSet::getConnection); + connection.bind(bindRequest(config.settings())); + final LDAPConnection finalConnection = connection; - findUser(user, ldapInterface, ActionListener.wrap((entry) -> { + findUser(user, finalConnection, ActionListener.wrap((entry) -> { if (entry == null) { listener.onResponse(null); } else { boolean sessionCreated = false; try { final String dn = entry.getDN(); - LdapSession session = new LdapSession(logger, config, ldapInterface, dn, groupResolver, metaDataResolver, timeout, + LdapSession session = new LdapSession(logger, config, finalConnection, dn, groupResolver, metaDataResolver, timeout, entry.getAttributes()); sessionCreated = true; listener.onResponse(session); } finally { - if (sessionCreated == false && useConnectionPool == false) { - IOUtils.close((LDAPConnection) ldapInterface); + if (sessionCreated == false) { + IOUtils.close(finalConnection); } } } @@ -302,41 +234,52 @@ class LdapUserSearchSessionFactory extends SessionFactory { } private void findUser(String user, LDAPInterface ldapInterface, ActionListener listener) { + final Filter filter; + try { + filter = createFilter(searchFilter, user); + } catch (LDAPException e) { + listener.onFailure(e); + return; + } + searchForEntry(ldapInterface, userSearchBaseDn, scope.scope(), - createEqualityFilter(userAttribute, encodeValue(user)), Math.toIntExact(timeout.seconds()), ignoreReferralErrors, listener, + filter, Math.toIntExact(timeout.seconds()), ignoreReferralErrors, listener, attributesToSearchFor(groupResolver.attributes(), metaDataResolver.attributeNames())); } - /* - * This method is used to cleanup the connections - */ - void shutdown() { - if (connectionPool != null) { - connectionPool.close(); - } - } - - static GroupsResolver groupResolver(Settings settings) { + private static GroupsResolver groupResolver(Settings settings) { if (SearchGroupsResolver.BASE_DN.exists(settings)) { return new SearchGroupsResolver(settings); } return new UserAttributeGroupsResolver(settings); } + static String getSearchFilter(RealmConfig config) { + final Settings settings = config.settings(); + final boolean hasAttribute = 
SEARCH_ATTRIBUTE.exists(settings); + final boolean hasFilter = SEARCH_FILTER.exists(settings); + if (hasAttribute && hasFilter) { + throw new IllegalArgumentException("search attribute setting [" + + RealmSettings.getFullSettingKey(config, SEARCH_ATTRIBUTE) + "] and filter setting [" + + RealmSettings.getFullSettingKey(config, SEARCH_FILTER) + "] cannot be combined!"); + } else if (hasFilter) { + return SEARCH_FILTER.get(settings); + } else if (hasAttribute) { + return "(" + SEARCH_ATTRIBUTE.get(settings) + "={0})"; + } else { + return "(uid={0})"; + } + } + public static Set> getSettings() { Set> settings = new HashSet<>(); settings.addAll(SessionFactory.getSettings()); + settings.addAll(PoolingSessionFactory.getSettings()); settings.add(SEARCH_BASE_DN); settings.add(SEARCH_SCOPE); settings.add(SEARCH_ATTRIBUTE); settings.add(POOL_ENABLED); - settings.add(POOL_INITIAL_SIZE); - settings.add(POOL_SIZE); - settings.add(HEALTH_CHECK_ENABLED); - settings.add(HEALTH_CHECK_DN); - settings.add(HEALTH_CHECK_INTERVAL); - settings.add(BIND_DN); - settings.add(BIND_PASSWORD); + settings.add(SEARCH_FILTER); settings.addAll(SearchGroupsResolver.getSettings()); settings.addAll(UserAttributeGroupsResolver.getSettings()); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java new file mode 100644 index 00000000000..253bdb64ca1 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java @@ -0,0 +1,185 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authc.ldap; + +import com.unboundid.ldap.sdk.BindRequest; +import com.unboundid.ldap.sdk.GetEntryLDAPConnectionPoolHealthCheck; +import com.unboundid.ldap.sdk.LDAPConnectionPool; +import com.unboundid.ldap.sdk.LDAPConnectionPoolHealthCheck; +import com.unboundid.ldap.sdk.LDAPException; +import com.unboundid.ldap.sdk.ServerSet; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.elasticsearch.xpack.security.authc.RealmSettings; +import org.elasticsearch.xpack.security.authc.ldap.support.LdapMetaDataResolver; +import org.elasticsearch.xpack.security.authc.ldap.support.LdapSession; +import org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils; +import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; +import org.elasticsearch.xpack.ssl.SSLService; + +import java.util.Optional; +import java.util.Set; +import java.util.function.Supplier; + +/** + * Base class for LDAP session factories that can make use of a connection pool + */ +abstract class PoolingSessionFactory extends SessionFactory implements Releasable { + + static final int DEFAULT_CONNECTION_POOL_SIZE = 20; + static final int DEFAULT_CONNECTION_POOL_INITIAL_SIZE = 0; + static final Setting BIND_DN = Setting.simpleString("bind_dn", Setting.Property.NodeScope, Setting.Property.Filtered); + static final Setting BIND_PASSWORD = Setting.simpleString("bind_password", Setting.Property.NodeScope, + Setting.Property.Filtered); + + private static final TimeValue DEFAULT_HEALTH_CHECK_INTERVAL = TimeValue.timeValueSeconds(60L); + private static final Setting POOL_INITIAL_SIZE = Setting.intSetting("user_search.pool.initial_size", + DEFAULT_CONNECTION_POOL_INITIAL_SIZE, 0, Setting.Property.NodeScope); + private static final Setting POOL_SIZE = Setting.intSetting("user_search.pool.size", + DEFAULT_CONNECTION_POOL_SIZE, 1, Setting.Property.NodeScope); + private static final Setting HEALTH_CHECK_INTERVAL = Setting.timeSetting("user_search.pool.health_check.interval", + DEFAULT_HEALTH_CHECK_INTERVAL, Setting.Property.NodeScope); + private static final Setting HEALTH_CHECK_ENABLED = Setting.boolSetting("user_search.pool.health_check.enabled", + true, Setting.Property.NodeScope); + private static final Setting> HEALTH_CHECK_DN = new Setting<>("user_search.pool.health_check.dn", (String) null, + Optional::ofNullable, Setting.Property.NodeScope); + + private final boolean useConnectionPool; + private final LDAPConnectionPool connectionPool; + + final LdapMetaDataResolver metaDataResolver; + final LdapSession.GroupsResolver groupResolver; + + + /** + * @param config the configuration for the realm + * @param sslService the ssl service to get a socket factory or context from + * @param groupResolver the resolver to use to find groups belonging to a user + * @param poolingEnabled the setting that should be used to determine if connection pooling is enabled + * @param bindRequestSupplier the supplier for a bind requests that should be used for pooled connections + * @param healthCheckDNSupplier a supplier for the dn 
to query for health checks + */ + PoolingSessionFactory(RealmConfig config, SSLService sslService, LdapSession.GroupsResolver groupResolver, + Setting poolingEnabled, Supplier bindRequestSupplier, + Supplier healthCheckDNSupplier) throws LDAPException { + super(config, sslService); + this.groupResolver = groupResolver; + this.metaDataResolver = new LdapMetaDataResolver(config.settings(), ignoreReferralErrors); + this.useConnectionPool = poolingEnabled.get(config.settings()); + if (useConnectionPool) { + this.connectionPool = createConnectionPool(config, serverSet, timeout, logger, bindRequestSupplier, healthCheckDNSupplier); + } else { + this.connectionPool = null; + } + } + + @Override + public final void session(String user, SecureString password, ActionListener listener) { + if (useConnectionPool) { + getSessionWithPool(connectionPool, user, password, listener); + } else { + getSessionWithoutPool(user, password, listener); + } + } + + @Override + public final void unauthenticatedSession(String user, ActionListener listener) { + if (useConnectionPool) { + getUnauthenticatedSessionWithPool(connectionPool, user, listener); + } else { + getUnauthenticatedSessionWithoutPool(user, listener); + } + } + + /** + * Attempts to get a {@link LdapSession} using the provided credentials and makes use of the provided connection pool + */ + abstract void getSessionWithPool(LDAPConnectionPool connectionPool, String user, SecureString password, + ActionListener listener); + + /** + * Attempts to get a {@link LdapSession} using the provided credentials and opens a new connection to the ldap server + */ + abstract void getSessionWithoutPool(String user, SecureString password, ActionListener listener); + + /** + * Attempts to search using a pooled connection for the user and provides an unauthenticated {@link LdapSession} to the listener if the + * user is found + */ + abstract void getUnauthenticatedSessionWithPool(LDAPConnectionPool connectionPool, String user, ActionListener listener); + + /** + * Attempts to search using a new connection for the user and provides an unauthenticated {@link LdapSession} to the listener if the + * user is found + */ + abstract void getUnauthenticatedSessionWithoutPool(String user, ActionListener listener); + + /** + * Creates the connection pool that will be used by the session factory and initializes the health check support + */ + static LDAPConnectionPool createConnectionPool(RealmConfig config, ServerSet serverSet, TimeValue timeout, Logger logger, + Supplier bindRequestSupplier, + Supplier healthCheckDnSupplier) throws LDAPException { + Settings settings = config.settings(); + BindRequest bindRequest = bindRequestSupplier.get(); + final int initialSize = POOL_INITIAL_SIZE.get(settings); + final int size = POOL_SIZE.get(settings); + LDAPConnectionPool pool = null; + boolean success = false; + try { + pool = LdapUtils.privilegedConnect(() -> new LDAPConnectionPool(serverSet, bindRequest, initialSize, size)); + pool.setRetryFailedOperationsDueToInvalidConnections(true); + if (HEALTH_CHECK_ENABLED.get(settings)) { + String entryDn = HEALTH_CHECK_DN.get(settings).orElseGet(healthCheckDnSupplier); + final long healthCheckInterval = HEALTH_CHECK_INTERVAL.get(settings).millis(); + if (entryDn != null) { + // Checks the status of the LDAP connection at a specified interval in the background. 
We do not check on + // create as the LDAP server may require authentication to get an entry and a bind request has not been executed + // yet so we could end up never getting a connection. We do not check on checkout as we always set retry operations + // and the pool will handle a bad connection without the added latency on every operation + LDAPConnectionPoolHealthCheck healthCheck = new GetEntryLDAPConnectionPoolHealthCheck(entryDn, timeout.millis(), + false, false, false, true, false); + pool.setHealthCheck(healthCheck); + pool.setHealthCheckIntervalMillis(healthCheckInterval); + } else { + logger.warn(new ParameterizedMessage("[{}] and [{}] have not been specified or are not valid distinguished names, " + "so connection health checking is disabled", RealmSettings.getFullSettingKey(config, BIND_DN), + RealmSettings.getFullSettingKey(config, HEALTH_CHECK_DN))); + } + } + + success = true; + return pool; + } finally { + if (success == false && pool != null) { + pool.close(); + } + } + } + + /** + * This method is used to cleanup the connection pool if one is being used + */ + @Override + public final void close() { + if (connectionPool != null) { + connectionPool.close(); + } + } + + public static Set> getSettings() { + return Sets.newHashSet(POOL_INITIAL_SIZE, POOL_SIZE, HEALTH_CHECK_ENABLED, HEALTH_CHECK_INTERVAL, HEALTH_CHECK_DN, BIND_DN, + BIND_PASSWORD); + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolver.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolver.java index 08d3ea8bc1d..3494d7e1b2b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolver.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolver.java @@ -110,7 +110,7 @@ class SearchGroupsResolver implements GroupsResolver { private void getUserId(String dn, Collection attributes, LDAPInterface connection, TimeValue timeout, ActionListener listener) { - if (isNullOrEmpty(userAttribute)) { + if (isNullOrEmpty(userAttribute) || userAttribute.equals("dn")) { listener.onResponse(dn); } else if (attributes != null) { final String value = attributes.stream() diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java index d4ae892c3c6..666a4b06c24 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapUtils.java @@ -48,8 +48,7 @@ import java.util.stream.Collectors; public final class LdapUtils { - public static final Filter OBJECT_CLASS_PRESENCE_FILTER = - Filter.createPresenceFilter("objectClass"); + public static final Filter OBJECT_CLASS_PRESENCE_FILTER = Filter.createPresenceFilter("objectClass"); private static final Logger LOGGER = ESLoggerFactory.getLogger(LdapUtils.class); @@ -320,7 +319,7 @@ public final class LdapUtils { : attributes.toArray(new String[attributes.size()]); } - static String[] encodeFilterValues(String... arguments) { + private static String[] encodeFilterValues(String... 
arguments) { for (int i = 0; i < arguments.length; i++) { arguments[i] = Filter.encodeValue(arguments[i]); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java index 32546c68cc5..3e9b27df7d1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java @@ -17,17 +17,17 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.security.authc.IncomingRequest; -import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; -import org.elasticsearch.xpack.security.authc.support.mapper.CompositeRoleMapper; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import org.elasticsearch.xpack.ssl.CertUtils; -import org.elasticsearch.xpack.ssl.SSLConfigurationSettings; -import org.elasticsearch.xpack.security.user.User; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.AuthenticationToken; import org.elasticsearch.xpack.security.authc.Realm; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.RealmSettings; +import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; +import org.elasticsearch.xpack.security.authc.support.mapper.CompositeRoleMapper; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.security.user.User; +import org.elasticsearch.xpack.ssl.CertUtils; +import org.elasticsearch.xpack.ssl.SSLConfigurationSettings; import javax.net.ssl.X509TrustManager; import java.security.cert.Certificate; @@ -82,17 +82,18 @@ public class PkiRealm extends Realm { } @Override - public void authenticate(AuthenticationToken authToken, ActionListener listener, IncomingRequest incomingRequest) { + public void authenticate(AuthenticationToken authToken, ActionListener listener) { X509AuthenticationToken token = (X509AuthenticationToken)authToken; if (isCertificateChainTrusted(trustManager, token, logger) == false) { - listener.onResponse(null); + listener.onResponse(AuthenticationResult.unsuccessful("Certificate for " + token.dn() + " is not trusted", null)); } else { final Map metadata = Collections.singletonMap("pki_dn", token.dn()); final UserRoleMapper.UserData user = new UserRoleMapper.UserData(token.principal(), token.dn(), Collections.emptySet(), metadata, this.config); roleMapper.resolveRoles(user, ActionListener.wrap( - roles -> listener.onResponse(new User(token.principal(), - roles.toArray(new String[roles.size()]), null, null, metadata, true)), + roles -> listener.onResponse(AuthenticationResult.success( + new User(token.principal(), roles.toArray(new String[roles.size()]), null, null, metadata, true) + )), listener::onFailure )); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java index 58a136bc70f..5dfca82779a 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealm.java @@ -5,23 +5,24 @@ */ package org.elasticsearch.xpack.security.authc.support; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ExecutionException; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.AuthenticationToken; -import org.elasticsearch.xpack.security.authc.IncomingRequest; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.user.User; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutionException; - public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm { public static final Setting CACHE_HASH_ALGO_SETTING = Setting.simpleString("cache.hash_algo", Setting.Property.NodeScope); @@ -70,16 +71,15 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm * doAuthenticate * @param authToken The authentication token * @param listener to be called at completion - * @param incomingRequest the request that is being authenticated */ @Override - public final void authenticate(AuthenticationToken authToken, ActionListener listener, IncomingRequest incomingRequest) { + public final void authenticate(AuthenticationToken authToken, ActionListener listener) { UsernamePasswordToken token = (UsernamePasswordToken) authToken; try { if (cache == null) { - doAuthenticate(token, listener, incomingRequest); + doAuthenticate(token, listener); } else { - authenticateWithCache(token, listener, incomingRequest); + authenticateWithCache(token, listener); } } catch (Exception e) { // each realm should handle exceptions, if we get one here it should be considered fatal @@ -87,72 +87,75 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm } } - private void authenticateWithCache(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + private void authenticateWithCache(UsernamePasswordToken token, ActionListener listener) { UserWithHash userWithHash = cache.get(token.principal()); if (userWithHash == null) { if (logger.isDebugEnabled()) { logger.debug("user [{}] not found in cache for realm [{}], proceeding with normal authentication", token.principal(), name()); } - doAuthenticateAndCache(token, ActionListener.wrap((user) -> { - if (user != null) { + doAuthenticateAndCache(token, ActionListener.wrap((result) -> { + if (result.isAuthenticated()) { + final User user = result.getUser(); logger.debug("realm [{}] authenticated user [{}], with roles [{}]", name(), token.principal(), user.roles()); } - listener.onResponse(user); - }, listener::onFailure), incomingRequest); + listener.onResponse(result); + }, listener::onFailure)); } else if (userWithHash.hasHash()) { if (userWithHash.verify(token.credentials())) { if (userWithHash.user.enabled()) { User user = userWithHash.user; logger.debug("realm [{}] authenticated user [{}], with roles [{}]", name(), token.principal(), user.roles()); - 
listener.onResponse(user); + listener.onResponse(AuthenticationResult.success(user)); } else { // We successfully authenticated, but the cached user is disabled. // Reload the primary record to check whether the user is still disabled cache.invalidate(token.principal()); - doAuthenticateAndCache(token, ActionListener.wrap((user) -> { - if (user != null) { + doAuthenticateAndCache(token, ActionListener.wrap((result) -> { + if (result.isAuthenticated()) { + final User user = result.getUser(); logger.debug("realm [{}] authenticated user [{}] (enabled:{}), with roles [{}]", name(), token.principal(), user.enabled(), user.roles()); } - listener.onResponse(user); - }, listener::onFailure), incomingRequest); + listener.onResponse(result); + }, listener::onFailure)); } } else { cache.invalidate(token.principal()); - doAuthenticateAndCache(token, ActionListener.wrap((user) -> { - if (user != null) { + doAuthenticateAndCache(token, ActionListener.wrap((result) -> { + if (result.isAuthenticated()) { + final User user = result.getUser(); logger.debug("cached user's password changed. realm [{}] authenticated user [{}], with roles [{}]", name(), token.principal(), user.roles()); } - listener.onResponse(user); - }, listener::onFailure), incomingRequest); + listener.onResponse(result); + }, listener::onFailure)); } } else { cache.invalidate(token.principal()); - doAuthenticateAndCache(token, ActionListener.wrap((user) -> { - if (user != null) { + doAuthenticateAndCache(token, ActionListener.wrap((result) -> { + if (result.isAuthenticated()) { + final User user = result.getUser(); logger.debug("cached user came from a lookup and could not be used for authentication. " + "realm [{}] authenticated user [{}] with roles [{}]", name(), token.principal(), user.roles()); } - listener.onResponse(user); - }, listener::onFailure), incomingRequest); + listener.onResponse(result); + }, listener::onFailure)); } } - private void doAuthenticateAndCache(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { - ActionListener wrapped = ActionListener.wrap((user) -> { - if (user == null) { - listener.onResponse(null); - } else { - UserWithHash userWithHash = new UserWithHash(user, token.credentials(), hasher); + private void doAuthenticateAndCache(UsernamePasswordToken token, ActionListener listener) { + ActionListener wrapped = ActionListener.wrap((result) -> { + Objects.requireNonNull(result, "AuthenticationResult cannot be null"); + if (result.getStatus() == AuthenticationResult.Status.SUCCESS) { + UserWithHash userWithHash = new UserWithHash(result.getUser(), token.credentials(), hasher); // it doesn't matter if we already computed it elsewhere cache.put(token.principal(), userWithHash); - listener.onResponse(user); } + listener.onResponse(result); }, listener::onFailure); - doAuthenticate(token, wrapped, incomingRequest); + doAuthenticate(token, wrapped); } @Override @@ -162,7 +165,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm return stats; } - protected abstract void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest); + protected abstract void doAuthenticate(UsernamePasswordToken token, ActionListener listener); @Override public final void lookupUser(String username, ActionListener listener) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java index 
02dd1f24f7d..24f1edc1325 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java @@ -5,22 +5,6 @@ */ package org.elasticsearch.xpack.security.authc.support; -import com.unboundid.ldap.sdk.DN; -import com.unboundid.ldap.sdk.LDAPException; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.watcher.FileChangesListener; -import org.elasticsearch.watcher.FileWatcher; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.XPackPlugin; -import org.elasticsearch.xpack.security.authc.RealmConfig; - import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; @@ -37,6 +21,25 @@ import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Function; +import com.unboundid.ldap.sdk.DN; +import com.unboundid.ldap.sdk.LDAPException; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.watcher.FileChangesListener; +import org.elasticsearch.watcher.FileWatcher; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.yaml.snakeyaml.error.YAMLException; + import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils.dn; @@ -57,20 +60,18 @@ public class DnRoleMapper implements UserRoleMapper { protected final Logger logger; protected final RealmConfig config; - private final String realmType; private final Path file; private final boolean useUnmappedGroupsAsRoles; private final CopyOnWriteArrayList listeners = new CopyOnWriteArrayList<>(); private volatile Map> dnRoles; - public DnRoleMapper(String realmType, RealmConfig config, ResourceWatcherService watcherService) { - this.realmType = realmType; + public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) { this.config = config; this.logger = config.logger(getClass()); useUnmappedGroupsAsRoles = USE_UNMAPPED_GROUPS_AS_ROLES_SETTING.get(config.settings()); file = resolveFile(config.settings(), config.env()); - dnRoles = parseFileLenient(file, logger, realmType, config.name()); + dnRoles = parseFileLenient(file, logger, config.type(), config.name()); FileWatcher watcher = new FileWatcher(file.getParent()); watcher.addListener(new FileListener()); try { @@ -101,7 +102,7 @@ public class DnRoleMapper implements UserRoleMapper { */ public static Map> parseFileLenient(Path path, Logger logger, String realmType, String realmName) { try { - return parseFile(path, logger, 
realmType, realmName); + return parseFile(path, logger, realmType, realmName, false); } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage( @@ -110,14 +111,20 @@ public class DnRoleMapper implements UserRoleMapper { } } - public static Map> parseFile(Path path, Logger logger, String realmType, String realmName) { + public static Map> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) { logger.trace("reading realm [{}/{}] role mappings file [{}]...", realmType, realmName, path.toAbsolutePath()); - if (!Files.exists(path)) { - logger.warn("Role mapping file [{}] for realm [{}] does not exist. Role mapping will be skipped.", + if (Files.exists(path) == false) { + final ParameterizedMessage message = new ParameterizedMessage( + "Role mapping file [{}] for realm [{}] does not exist.", path.toAbsolutePath(), realmName); - return emptyMap(); + if (strict) { + throw new ElasticsearchException(message.getFormattedMessage()); + } else { + logger.warn(message.getFormattedMessage() + " Role mapping will be skipped."); + return emptyMap(); + } } try (InputStream in = Files.newInputStream(path)) { @@ -136,14 +143,18 @@ public class DnRoleMapper implements UserRoleMapper { } dnRoles.add(role); } catch (LDAPException e) { - logger.error(new ParameterizedMessage( - "invalid DN [{}] found in [{}] role mappings [{}] for realm [{}/{}]. skipping... ", - providedDn, - realmType, - path.toAbsolutePath(), - realmType, - realmName), - e); + ParameterizedMessage message = new ParameterizedMessage( + "invalid DN [{}] found in [{}] role mappings [{}] for realm [{}/{}].", + providedDn, + realmType, + path.toAbsolutePath(), + realmType, + realmName); + if (strict) { + throw new ElasticsearchException(message.getFormattedMessage(), e); + } else { + logger.error(message.getFormattedMessage() + " skipping...", e); + } } } @@ -152,7 +163,7 @@ public class DnRoleMapper implements UserRoleMapper { logger.debug("[{}] role mappings found in file [{}] for realm [{}/{}]", dnToRoles.size(), path.toAbsolutePath(), realmType, realmName); return unmodifiableMap(dnToRoles); - } catch (IOException e) { + } catch (IOException | YAMLException e) { throw new ElasticsearchException("could not read realm [" + realmType + "/" + realmName + "] role mappings file [" + path.toAbsolutePath() + "]", e); } @@ -166,7 +177,7 @@ public class DnRoleMapper implements UserRoleMapper { public void resolveRoles(UserData user, ActionListener> listener) { try { listener.onResponse(resolveRoles(user.getDn(), user.getGroups())); - } catch( Exception e) { + } catch (Exception e) { listener.onFailure(e); } } @@ -185,8 +196,8 @@ public class DnRoleMapper implements UserRoleMapper { } } if (logger.isDebugEnabled()) { - logger.debug("the roles [{}], are mapped from these [{}] groups [{}] using file [{}] for realm [{}/{}]", roles, realmType, - groupDns, file.getFileName(), realmType, config.name()); + logger.debug("the roles [{}], are mapped from these [{}] groups [{}] using file [{}] for realm [{}/{}]", roles, config.type(), + groupDns, file.getFileName(), config.type(), config.name()); } DN userDn = dn(userDnString); @@ -197,7 +208,7 @@ public class DnRoleMapper implements UserRoleMapper { if (logger.isDebugEnabled()) { logger.debug("the roles [{}], are mapped from the user [{}] using file [{}] for realm [{}/{}]", (rolesMappedToUserDn == null) ? 
Collections.emptySet() : rolesMappedToUserDn, userDnString, file.getFileName(), - realmType, config.name()); + config.type(), config.name()); } return roles; } @@ -225,8 +236,8 @@ public class DnRoleMapper implements UserRoleMapper { public void onFileChanged(Path file) { if (file.equals(DnRoleMapper.this.file)) { logger.info("role mappings file [{}] changed for realm [{}/{}]. updating mappings...", file.toAbsolutePath(), - realmType, config.name()); - dnRoles = parseFileLenient(file, logger, realmType, config.name()); + config.type(), config.name()); + dnRoles = parseFileLenient(file, logger, config.type(), config.name()); notifyRefresh(); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java new file mode 100644 index 00000000000..04de860b1b2 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheck.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support; + +import java.nio.file.Path; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.xpack.security.authc.RealmConfig; + +/** + * A BootstrapCheck that {@link DnRoleMapper} files exist and are valid (valid YAML and valid DNs) + */ +public class RoleMappingFileBootstrapCheck implements BootstrapCheck { + + private final RealmConfig realmConfig; + private final Path path; + + private final SetOnce error = new SetOnce<>(); + + public RoleMappingFileBootstrapCheck(RealmConfig config, Path path) { + this.realmConfig = config; + this.path = path; + } + + @Override + public boolean check() { + try { + DnRoleMapper.parseFile(path, realmConfig.logger(getClass()), realmConfig.type(), realmConfig.name(), true); + return false; + } catch (Exception e) { + error.set(e.getMessage()); + return true; + } + + } + + @Override + public String errorMessage() { + return error.get(); + } + + @Override + public boolean alwaysEnforce() { + return true; + } + + public static BootstrapCheck create(RealmConfig realmConfig) { + if (realmConfig.enabled() && DnRoleMapper.ROLE_MAPPING_FILE_SETTING.exists(realmConfig.settings())) { + Path file = DnRoleMapper.resolveFile(realmConfig.settings(), realmConfig.env()); + return new RoleMappingFileBootstrapCheck(realmConfig, file); + } + return null; + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java index 5bf44f1a5ab..e981456ac6b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java @@ -34,7 +34,7 @@ public class CompositeRoleMapper implements UserRoleMapper { public CompositeRoleMapper(String realmType, RealmConfig realmConfig, ResourceWatcherService watcherService, NativeRoleMappingStore nativeRoleMappingStore) { - this(new DnRoleMapper(realmType, realmConfig, watcherService), nativeRoleMappingStore); + this(new DnRoleMapper(realmConfig, 
watcherService), nativeRoleMappingStore); } private CompositeRoleMapper(UserRoleMapper... delegates) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index d581e636b93..c439950bf65 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -152,15 +152,6 @@ public class AuthorizationService extends AbstractComponent { throw denial(authentication, action, request); } - // norelease - // TODO: This functionality is disabled as it is not yet compatible with the upgrade process - // If the user is the elastic user in setup mode, then only change password requests can be authorized -// if (ElasticUser.isElasticUserInSetupMode(authentication.getUser()) -// && ChangePasswordAction.NAME.equals(action) == false -// && ClusterHealthAction.NAME.equals(action) == false) { -// throw denial(authentication, action, request); -// } - // get the roles of the authenticated user, which may be different than the effective Role permission = userRole; diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractor.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractor.java index 70e063e0873..28aa7c69a01 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractor.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractor.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.security.authz.accesscontrol; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.DocValuesNumbersQuery; -import org.apache.lucene.search.FieldValueQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -23,9 +23,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.Weight; import org.apache.lucene.search.spans.SpanTermQuery; -import java.util.ArrayList; import java.util.HashSet; -import java.util.List; import java.util.Set; /** @@ -77,8 +75,8 @@ class FieldExtractor { fields.add(((PointRangeQuery)query).getField()); } else if (query instanceof PointInSetQuery) { fields.add(((PointInSetQuery)query).getField()); - } else if (query instanceof FieldValueQuery) { - fields.add(((FieldValueQuery)query).getField()); + } else if (query instanceof DocValuesFieldExistsQuery) { + fields.add(((DocValuesFieldExistsQuery)query).getField()); } else if (query instanceof DocValuesNumbersQuery) { fields.add(((DocValuesNumbersQuery)query).getField()); } else if (query instanceof IndexOrDocValuesQuery) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java index b1a2bf8f0b5..3ca9f6093ef 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java @@ -58,6 +58,7 @@ import 
org.elasticsearch.index.shard.ShardUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.xpack.security.authc.Authentication; import org.elasticsearch.xpack.security.authz.AuthorizationService; @@ -272,7 +273,8 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper { userModel.put("metadata", Collections.unmodifiableMap(user.metadata())); params.put("_user", userModel); // Always enforce mustache script lang: - script = new Script(script.getType(), "mustache", script.getIdOrCode(), script.getOptions(), params); + script = new Script(script.getType(), + script.getType() == ScriptType.STORED ? null : "mustache", script.getIdOrCode(), script.getOptions(), params); TemplateScript compiledTemplate = scriptService.compile(script, TemplateScript.CONTEXT).newInstance(script.getParams()); return compiledTemplate.execute(); } else { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java index c2fc2375c90..2a75f33cb1b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/privilege/SystemPrivilege.java @@ -20,7 +20,9 @@ public final class SystemPrivilege extends Privilege { "indices:monitor/*", // added for monitoring "cluster:monitor/*", // added for monitoring "cluster:admin/reroute", // added for DiskThresholdDecider.DiskListener - "indices:admin/mapping/put" // needed for recovery and shrink api + "indices:admin/mapping/put", // needed for recovery and shrink api + "indices:admin/template/put", // needed for the TemplateUpgradeService + "indices:admin/template/delete" // needed for the TemplateUpgradeService ), Automatons.patterns("internal:transport/proxy/*"))); // no proxy actions for system user! 
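The hunk above widens the system user's allowed actions to cover the two index template actions needed by the TemplateUpgradeService, while proxy actions remain subtracted from the automaton. A minimal sketch of the resulting checks, assuming the existing SystemPrivilege.INSTANCE singleton and the predicate() accessor inherited from Privilege (neither appears in this diff):

    // Sketch only, not part of the patch.
    static void verifySystemUserActions() {
        java.util.function.Predicate<String> allowed = SystemPrivilege.INSTANCE.predicate();
        assert allowed.test("indices:admin/template/put");                  // needed by the TemplateUpgradeService
        assert allowed.test("indices:admin/template/delete");               // needed by the TemplateUpgradeService
        assert allowed.test("internal:transport/proxy/anything") == false;  // proxy actions stay excluded
    }
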
private SystemPrivilege() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index 8c6c69bcfa2..e3835c5e219 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.security.authz.store; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; @@ -83,8 +85,7 @@ public class CompositeRolesStore extends AbstractComponent { public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, NativeRolesStore nativeRolesStore, ReservedRolesStore reservedRolesStore, List, ActionListener>>> rolesProviders, - ThreadContext threadContext, - XPackLicenseState licenseState) { + ThreadContext threadContext, XPackLicenseState licenseState) { super(settings); this.fileRolesStore = fileRolesStore; // invalidating all on a file based role update is heavy handed to say the least, but in general this should be infrequent so the @@ -289,6 +290,16 @@ public class CompositeRolesStore extends AbstractComponent { }, listener::onFailure)); } + public void onSecurityIndexHealthChange(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { + final boolean movedFromRedToNonRed = (previousHealth == null || previousHealth.getStatus() == ClusterHealthStatus.RED) + && currentHealth != null && currentHealth.getStatus() != ClusterHealthStatus.RED; + final boolean indexDeleted = previousHealth != null && currentHealth == null; + + if (movedFromRedToNonRed || indexDeleted) { + invalidateAll(); + } + } + /** * A mutable class that can be used to represent the combination of one or more {@link IndicesPrivileges} */ diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java index f01d38302ee..f3235978f91 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStore.java @@ -58,8 +58,8 @@ public class ReservedRolesStore { null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("ingest_admin", new RoleDescriptor("ingest_admin", new String[] { "manage_index_templates", "manage_pipeline" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) - .put("reporting_user", new RoleDescriptor("reporting_user", null, new RoleDescriptor.IndicesPrivileges[] { - RoleDescriptor.IndicesPrivileges.builder().indices(".reporting-*").privileges("read", "write").build() }, + // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role + .put("reporting_user", new RoleDescriptor("reporting_user", null, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put(KibanaUser.ROLE_NAME, new RoleDescriptor(KibanaUser.ROLE_NAME, new String[] { "monitor", MonitoringBulkAction.NAME}, new RoleDescriptor.IndicesPrivileges[] { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/bootstrap/BootstrapElasticPassword.java 
b/plugin/src/main/java/org/elasticsearch/xpack/security/bootstrap/BootstrapElasticPassword.java new file mode 100644 index 00000000000..db15acc2029 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/bootstrap/BootstrapElasticPassword.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.bootstrap; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.action.user.ChangePasswordRequestBuilder; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; + +import java.util.concurrent.Semaphore; + +/** + * This process adds a ClusterStateListener to the ClusterService that will listen for cluster state updates. + * Once the cluster and the security index are ready, it will attempt to bootstrap the elastic user's + * password with a password from the keystore. If the password is not in the keystore or the elastic user + * already has a password, then the user's password will not be set. Once the process is complete, the + * listener will remove itself. 
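The new class below registers a self-removing ClusterStateListener, as its header comment describes. A minimal wiring sketch under the assumption that the object is created during security component setup; only the constructor and initiatePasswordBootstrap() come from this file, and the surrounding variable names are illustrative:

    // Sketch only, not part of the patch.
    BootstrapElasticPassword bootstrapElasticPassword = new BootstrapElasticPassword(
            settings, logger, clusterService, reservedRealm, securityLifecycleService);
    // Registers the self-removing cluster-state listener; this is a no-op unless a bootstrap
    // password was provided via the keystore (ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD).
    bootstrapElasticPassword.initiatePasswordBootstrap();
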
+ */ +public final class BootstrapElasticPassword { + + private final Settings settings; + private final Logger logger; + private final ClusterService clusterService; + private final ReservedRealm reservedRealm; + private final SecurityLifecycleService lifecycleService; + private final boolean reservedRealmDisabled; + + public BootstrapElasticPassword(Settings settings, Logger logger, ClusterService clusterService, ReservedRealm reservedRealm, + SecurityLifecycleService lifecycleService) { + this.reservedRealmDisabled = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings) == false; + this.settings = settings; + this.logger = logger; + this.clusterService = clusterService; + this.reservedRealm = reservedRealm; + this.lifecycleService = lifecycleService; + } + + public void initiatePasswordBootstrap() { + SecureString bootstrapPassword = ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.get(settings); + if (bootstrapPassword.length() == 0) { + return; + } else if (reservedRealmDisabled) { + logger.warn("elastic password will not be bootstrapped because the reserved realm is disabled"); + bootstrapPassword.close(); + return; + } + + SecureString passwordHash = new SecureString(ChangePasswordRequestBuilder.validateAndHashPassword(bootstrapPassword)); + bootstrapPassword.close(); + + clusterService.addListener(new BootstrapPasswordClusterStateListener(passwordHash)); + } + + private class BootstrapPasswordClusterStateListener implements ClusterStateListener { + + private final Semaphore semaphore = new Semaphore(1); + private final SecureString passwordHash; + private final SetOnce isDone = new SetOnce<>(); + + private BootstrapPasswordClusterStateListener(SecureString passwordHash) { + this.passwordHash = passwordHash; + } + + @Override + public void clusterChanged(ClusterChangedEvent event) { + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) + || lifecycleService.isSecurityIndexOutOfDate() + || (lifecycleService.isSecurityIndexExisting() && lifecycleService.isSecurityIndexAvailable() == false) + || lifecycleService.isSecurityIndexWriteable() == false) { + // We hold off bootstrapping until the node recovery is complete, the security index is up to date, and + // security index is writeable. If the security index currently exists, it must also be available. + return; + } + + // Only allow one attempt to bootstrap the password at a time + if (semaphore.tryAcquire()) { + // Ensure that we do not attempt to bootstrap after the process is complete. This is important as we + // clear the password hash in the cleanup phase. 
+ if (isDone.get() != null) { + semaphore.release(); + return; + } + + reservedRealm.bootstrapElasticUserCredentials(passwordHash, new ActionListener() { + @Override + public void onResponse(Boolean passwordSet) { + cleanup(); + if (passwordSet == false) { + logger.warn("elastic password was not bootstrapped because its password was already set"); + } + semaphore.release(); + } + + @Override + public void onFailure(Exception e) { + cleanup(); + logger.error("unexpected exception when attempting to bootstrap password", e); + semaphore.release(); + } + }); + } + } + + private void cleanup() { + isDone.set(true); + IOUtils.closeWhileHandlingException(() -> clusterService.removeListener(this), passwordHash); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java b/plugin/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java index 51c8ecb41f6..da538d4fbf9 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/support/Automatons.java @@ -21,6 +21,7 @@ import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZ import static org.apache.lucene.util.automaton.Operations.concatenate; import static org.apache.lucene.util.automaton.Operations.minus; import static org.apache.lucene.util.automaton.Operations.union; +import static org.elasticsearch.common.Strings.collectionToDelimitedString; public final class Automatons { @@ -122,11 +123,25 @@ public final class Automatons { } public static Predicate predicate(Collection patterns) { - return predicate(patterns(patterns)); + return predicate(patterns(patterns), collectionToDelimitedString(patterns, "|")); } public static Predicate predicate(Automaton automaton) { + return predicate(automaton, "Predicate for " + automaton); + } + + private static Predicate predicate(Automaton automaton, final String toString) { CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_MAX_DETERMINIZED_STATES); - return runAutomaton::run; + return new Predicate() { + @Override + public boolean test(String s) { + return runAutomaton.run(s); + } + + @Override + public String toString() { + return toString; + } + }; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java b/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java index a229ea3bb12..04cf5f432f1 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/support/IndexLifecycleManager.java @@ -5,15 +5,16 @@ */ package org.elasticsearch.xpack.security.support; -import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -34,6 +35,7 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateReque import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import 
org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -82,6 +84,8 @@ public class IndexLifecycleManager extends AbstractComponent { private final AtomicReference migrateDataState = new AtomicReference<>(UpgradeState.NOT_STARTED); private final AtomicInteger migrateDataAttempts = new AtomicInteger(0); + private final List> indexHealthChangeListeners = new CopyOnWriteArrayList<>(); + private volatile boolean templateIsUpToDate; private volatile boolean indexExists; private volatile boolean isIndexUpToDate; @@ -155,9 +159,18 @@ public class IndexLifecycleManager extends AbstractComponent { return this.migrateDataState.get(); } + /** + * Adds a listener which will be notified when the security index health changes. The previous and + * current health will be provided to the listener so that the listener can determine if any action + * needs to be taken. + */ + public void addIndexHealthChangeListener(BiConsumer listener) { + indexHealthChangeListeners.add(listener); + } + public void clusterChanged(ClusterChangedEvent event) { - final ClusterState state = event.state(); - processClusterState(state); + processClusterState(event.state()); + checkIndexHealthChange(event); } private void processClusterState(ClusterState state) { @@ -183,6 +196,37 @@ public class IndexLifecycleManager extends AbstractComponent { } } + private void checkIndexHealthChange(ClusterChangedEvent event) { + final ClusterState state = event.state(); + final ClusterState previousState = event.previousState(); + final IndexMetaData indexMetaData = resolveConcreteIndex(indexName, state.metaData()); + final IndexMetaData previousIndexMetaData = resolveConcreteIndex(indexName, previousState.metaData()); + if (indexMetaData != null) { + final ClusterIndexHealth currentHealth = + new ClusterIndexHealth(indexMetaData, state.getRoutingTable().index(indexMetaData.getIndex())); + final ClusterIndexHealth previousHealth = previousIndexMetaData != null ? 
new ClusterIndexHealth(previousIndexMetaData, + previousState.getRoutingTable().index(previousIndexMetaData.getIndex())) : null; + + if (previousHealth == null || previousHealth.getStatus() != currentHealth.getStatus()) { + notifyIndexHealthChangeListeners(previousHealth, currentHealth); + } + } else if (previousIndexMetaData != null) { + final ClusterIndexHealth previousHealth = + new ClusterIndexHealth(previousIndexMetaData, previousState.getRoutingTable().index(previousIndexMetaData.getIndex())); + notifyIndexHealthChangeListeners(previousHealth, null); + } + } + + private void notifyIndexHealthChangeListeners(ClusterIndexHealth previousHealth, ClusterIndexHealth currentHealth) { + for (BiConsumer consumer : indexHealthChangeListeners) { + try { + consumer.accept(previousHealth, currentHealth); + } catch (Exception e) { + logger.warn(new ParameterizedMessage("failed to notify listener [{}] of index health change", consumer), e); + } + } + } + private boolean checkIndexAvailable(ClusterState state) { final IndexRoutingTable routingTable = getIndexRoutingTable(state); if (routingTable != null && routingTable.allPrimaryShardsActive()) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java b/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java index e22d97c6276..fe9e401e9bd 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/security/user/ElasticUser.java @@ -18,27 +18,8 @@ public class ElasticUser extends User { public static final String NAME = "elastic"; private static final String ROLE_NAME = "superuser"; - private static final String SETUP_MODE = "_setup_mode"; public ElasticUser(boolean enabled) { - this(enabled, false); - } - - public ElasticUser(boolean enabled, boolean setupMode) { - super(NAME, new String[] { ROLE_NAME }, null, null, metadata(setupMode), enabled); - } - - public static boolean isElasticUserInSetupMode(User user) { - return NAME.equals(user.principal()) && Boolean.TRUE.equals(user.metadata().get(SETUP_MODE)); - } - - private static Map metadata(boolean setupMode) { - if (setupMode == false) { - return MetadataUtils.DEFAULT_RESERVED_METADATA; - } else { - HashMap metadata = new HashMap<>(MetadataUtils.DEFAULT_RESERVED_METADATA); - metadata.put(SETUP_MODE, true); - return metadata; - } + super(NAME, new String[] { ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java index 0dcc9411db4..b66a01b0efe 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertUtils.java @@ -5,44 +5,6 @@ */ package org.elasticsearch.xpack.ssl; -import org.bouncycastle.asn1.ASN1ObjectIdentifier; -import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; -import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; -import org.bouncycastle.asn1.x500.X500Name; -import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; -import org.bouncycastle.asn1.x509.BasicConstraints; -import org.bouncycastle.asn1.x509.Extension; -import org.bouncycastle.asn1.x509.ExtensionsGenerator; -import org.bouncycastle.asn1.x509.GeneralName; -import org.bouncycastle.asn1.x509.GeneralNames; -import org.bouncycastle.asn1.x509.Time; -import org.bouncycastle.cert.CertIOException; -import org.bouncycastle.cert.X509CertificateHolder; -import 
org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; -import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils; -import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder; -import org.bouncycastle.jce.provider.BouncyCastleProvider; -import org.bouncycastle.openssl.PEMEncryptedKeyPair; -import org.bouncycastle.openssl.PEMKeyPair; -import org.bouncycastle.openssl.PEMParser; -import org.bouncycastle.openssl.X509TrustedCertificateBlock; -import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter; -import org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder; -import org.bouncycastle.operator.ContentSigner; -import org.bouncycastle.operator.OperatorCreationException; -import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; -import org.bouncycastle.pkcs.PKCS10CertificationRequest; -import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.network.InetAddressHelper; -import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.env.Environment; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; - import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManager; @@ -79,12 +41,54 @@ import java.util.Locale; import java.util.Set; import java.util.function.Supplier; +import org.bouncycastle.asn1.ASN1Encodable; +import org.bouncycastle.asn1.ASN1ObjectIdentifier; +import org.bouncycastle.asn1.DERIA5String; +import org.bouncycastle.asn1.DERSequence; +import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; +import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; +import org.bouncycastle.asn1.x500.X500Name; +import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; +import org.bouncycastle.asn1.x509.BasicConstraints; +import org.bouncycastle.asn1.x509.Extension; +import org.bouncycastle.asn1.x509.ExtensionsGenerator; +import org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.asn1.x509.GeneralNames; +import org.bouncycastle.asn1.x509.Time; +import org.bouncycastle.cert.CertIOException; +import org.bouncycastle.cert.X509CertificateHolder; +import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; +import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils; +import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder; +import org.bouncycastle.jce.provider.BouncyCastleProvider; +import org.bouncycastle.openssl.PEMEncryptedKeyPair; +import org.bouncycastle.openssl.PEMKeyPair; +import org.bouncycastle.openssl.PEMParser; +import org.bouncycastle.openssl.X509TrustedCertificateBlock; +import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter; +import org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder; +import org.bouncycastle.operator.ContentSigner; +import org.bouncycastle.operator.OperatorCreationException; +import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.InetAddressHelper; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.env.Environment; +import org.joda.time.DateTime; +import 
org.joda.time.DateTimeZone; + /** * Utility methods that deal with {@link Certificate}, {@link KeyStore}, {@link X509ExtendedTrustManager}, {@link X509ExtendedKeyManager} * and other certificate related objects. */ public class CertUtils { + static final String CN_OID = "2.5.4.3"; + private static final int SERIAL_BIT_LENGTH = 20 * 8; static final BouncyCastleProvider BC_PROV = new BouncyCastleProvider(); @@ -137,6 +141,7 @@ public class CertUtils { */ public static X509ExtendedTrustManager trustManager(Certificate[] certificates) throws NoSuchAlgorithmException, UnrecoverableKeyException, KeyStoreException, IOException, CertificateException { + assert certificates != null : "Cannot create trust manager with null certificates"; KeyStore store = KeyStore.getInstance("jks"); store.load(null, null); int counter = 0; @@ -416,4 +421,16 @@ public class CertUtils { } } } + + /** + * Creates an X.509 {@link GeneralName} for use as a Common Name in the certificate's Subject Alternative Names + * extension. A common name is a name with a tag of {@link GeneralName#otherName OTHER}, with an object-id that references + * the {@link #CN_OID cn} attribute, and a DER encoded IA5 (ASCII) string for the name. + * This usage of using the {@code cn} OID as a Subject Alternative Name is non-standard and will not be + * recognised by other X.509/TLS implementations. + */ + static GeneralName createCommonName(String cn) { + final ASN1Encodable[] sequence = { new ASN1ObjectIdentifier(CN_OID), new DERIA5String(cn) }; + return new GeneralName(GeneralName.otherName, new DERSequence(sequence)); + } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java index d8f5726b58e..fa5efa3ce78 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTool.java @@ -5,31 +5,6 @@ */ package org.elasticsearch.xpack.ssl; -import joptsimple.OptionSet; -import joptsimple.OptionSpec; -import org.bouncycastle.asn1.DERIA5String; -import org.bouncycastle.asn1.x509.GeneralName; -import org.bouncycastle.asn1.x509.GeneralNames; -import org.bouncycastle.openssl.PEMEncryptor; -import org.bouncycastle.openssl.jcajce.JcaPEMWriter; -import org.bouncycastle.openssl.jcajce.JcePEMEncryptorBuilder; -import org.bouncycastle.pkcs.PKCS10CertificationRequest; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cli.EnvironmentAwareCommand; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.env.Environment; - import javax.security.auth.x500.X500Principal; import java.io.IOException; import java.io.OutputStream; @@ -59,6 +34,31 @@ import java.util.regex.Pattern; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; +import joptsimple.OptionSet; +import joptsimple.OptionSpec; +import org.bouncycastle.asn1.DERIA5String; +import 
org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.asn1.x509.GeneralNames; +import org.bouncycastle.openssl.PEMEncryptor; +import org.bouncycastle.openssl.jcajce.JcaPEMWriter; +import org.bouncycastle.openssl.jcajce.JcePEMEncryptorBuilder; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.cli.EnvironmentAwareCommand; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.env.Environment; + /** * CLI tool to make generation of certificates or certificate requests easier for users */ @@ -89,11 +89,13 @@ public class CertificateTool extends EnvironmentAwareCommand { new ConstructingObjectParser<>( "instances", a -> new CertificateInformation( - (String) a[0], (String) (a[1] == null ? a[0] : a[1]), (List) a[2], (List) a[3])); + (String) a[0], (String) (a[1] == null ? a[0] : a[1]), + (List) a[2], (List) a[3], (List) a[4])); instanceParser.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name")); instanceParser.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("filename")); instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("ip")); instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("dns")); + instanceParser.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), new ParseField("cn")); PARSER.declareObjectArray(List::addAll, instanceParser, new ParseField("instances")); } @@ -220,8 +222,9 @@ public class CertificateTool extends EnvironmentAwareCommand { String dnsNames = terminal.readText("Enter DNS names for instance (comma-separated if more than one) []: "); List ipList = Arrays.asList(Strings.splitStringByCommaToArray(ipAddresses)); List dnsList = Arrays.asList(Strings.splitStringByCommaToArray(dnsNames)); + List commonNames = null; - CertificateInformation information = new CertificateInformation(name, filename, ipList, dnsList); + CertificateInformation information = new CertificateInformation(name, filename, ipList, dnsList, commonNames); List validationErrors = information.validate(); if (validationErrors.isEmpty()) { if (map.containsKey(name)) { @@ -269,7 +272,8 @@ public class CertificateTool extends EnvironmentAwareCommand { fullyWriteFile(outputFile, (outputStream, pemWriter) -> { for (CertificateInformation certificateInformation : certInfo) { KeyPair keyPair = CertUtils.generateKeyPair(keysize); - GeneralNames sanList = getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames); + GeneralNames sanList = getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, + certificateInformation.commonNames); PKCS10CertificationRequest csr = CertUtils.generateCSR(keyPair, certificateInformation.name.x500Principal, sanList); final String dirName = 
certificateInformation.name.filename + "/"; @@ -352,7 +356,8 @@ public class CertificateTool extends EnvironmentAwareCommand { for (CertificateInformation certificateInformation : certificateInformations) { KeyPair keyPair = CertUtils.generateKeyPair(keysize); Certificate certificate = CertUtils.generateSignedCertificate(certificateInformation.name.x500Principal, - getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames), + getSubjectAlternativeNamesValue(certificateInformation.ipAddresses, certificateInformation.dnsNames, + certificateInformation.commonNames), keyPair, caInfo.caCert, caInfo.privateKey, days); final String dirName = certificateInformation.name.filename + "/"; @@ -531,7 +536,7 @@ public class CertificateTool extends EnvironmentAwareCommand { } } - private static GeneralNames getSubjectAlternativeNamesValue(List ipAddresses, List dnsNames) { + private static GeneralNames getSubjectAlternativeNamesValue(List ipAddresses, List dnsNames, List commonNames) { Set generalNameList = new HashSet<>(); for (String ip : ipAddresses) { generalNameList.add(new GeneralName(GeneralName.iPAddress, ip)); @@ -541,6 +546,10 @@ public class CertificateTool extends EnvironmentAwareCommand { generalNameList.add(new GeneralName(GeneralName.dNSName, dns)); } + for (String cn : commonNames) { + generalNameList.add(CertUtils.createCommonName(cn)); + } + if (generalNameList.isEmpty()) { return null; } @@ -551,11 +560,13 @@ public class CertificateTool extends EnvironmentAwareCommand { final Name name; final List ipAddresses; final List dnsNames; + final List commonNames; - CertificateInformation(String name, String filename, List ipAddresses, List dnsNames) { + CertificateInformation(String name, String filename, List ipAddresses, List dnsNames, List commonNames) { this.name = Name.fromUserProvidedName(name, filename); this.ipAddresses = ipAddresses == null ? Collections.emptyList() : ipAddresses; this.dnsNames = dnsNames == null ? Collections.emptyList() : dnsNames; + this.commonNames = commonNames == null ? Collections.emptyList() : commonNames; } List validate() { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTrustRestrictions.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTrustRestrictions.java new file mode 100644 index 00000000000..ff98df7141d --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/CertificateTrustRestrictions.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ssl; + +import java.util.Collection; +import java.util.Collections; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.elasticsearch.xpack.security.support.Automatons; + +/** + * In-memory representation of the trusted names for a "trust group". + * + * @see RestrictedTrustManager + */ +class CertificateTrustRestrictions { + + private final Set> trustedNames; + + CertificateTrustRestrictions(Collection trustedNames) { + this.trustedNames = trustedNames.stream().map(Automatons::predicate).collect(Collectors.toSet()); + } + + /** + * @return The names (X509 certificate subjectAlternativeNames) of the nodes that are + * allowed to connect to this cluster (for the targeted interface).
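// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this PR): how a trust group's wildcard name patterns
// can be matched against a certificate's common names. The real CertificateTrustRestrictions
// above relies on x-pack's Automatons wildcard predicates; plain java.util.regex is used here
// only as a stand-in, and the TrustNameMatcher class and its method names are hypothetical.
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

class TrustNameMatcher {

    private final Set<Predicate<String>> trustedNames;

    TrustNameMatcher(List<String> patterns) {
        // Turn "*.node.cluster1.example.com" style patterns into whole-string regex predicates.
        this.trustedNames = patterns.stream()
                .map(p -> Pattern.compile("\\Q" + p.replace("*", "\\E.*\\Q") + "\\E"))
                .map(pattern -> (Predicate<String>) name -> pattern.matcher(name).matches())
                .collect(Collectors.toSet());
    }

    boolean isTrusted(Set<String> certificateCommonNames) {
        // Trust the certificate if any of its common names matches any configured pattern,
        // mirroring the check that RestrictedTrustManager#verifyCertificateNames performs.
        return certificateCommonNames.stream()
                .anyMatch(cn -> trustedNames.stream().anyMatch(p -> p.test(cn)));
    }
}
// Example: new TrustNameMatcher(Arrays.asList("*.node.cluster1")).isTrusted(Collections.singleton("es1.node.cluster1")) == true
// ---------------------------------------------------------------------------------------------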
+ */ + Set> getTrustedNames() { + return Collections.unmodifiableSet(trustedNames); + } + + @Override + public String toString() { + return "{trustedNames=" + trustedNames + '}'; + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/RestrictedTrustConfig.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/RestrictedTrustConfig.java new file mode 100644 index 00000000000..d8ec483c6ce --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/RestrictedTrustConfig.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ssl; + +import javax.net.ssl.X509ExtendedTrustManager; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; + +/** + * An implementation of {@link TrustConfig} that constructs a {@link RestrictedTrustManager}. + * This implementation always wraps another TrustConfig to perform the + * underlying certificate validation. + */ +public final class RestrictedTrustConfig extends TrustConfig { + + public static final String RESTRICTIONS_KEY_SUBJECT_NAME = "trust.subject_name"; + private final Settings settings; + private final String groupConfigPath; + private final TrustConfig delegate; + + public RestrictedTrustConfig(Settings settings, String groupConfigPath, TrustConfig delegate) { + this.settings = settings; + this.groupConfigPath = Objects.requireNonNull(groupConfigPath); + this.delegate = Objects.requireNonNull(delegate); + } + + @Override + RestrictedTrustManager createTrustManager(@Nullable Environment environment) { + try { + final X509ExtendedTrustManager delegateTrustManager = delegate.createTrustManager(environment); + final CertificateTrustRestrictions trustGroupConfig = readTrustGroup(resolveGroupConfigPath(environment)); + return new RestrictedTrustManager(settings, delegateTrustManager, trustGroupConfig); + } catch (IOException e) { + throw new ElasticsearchException("failed to initialize TrustManager for {}", e, toString()); + } + } + + @Override + List filesToMonitor(@Nullable Environment environment) { + return Collections.singletonList(resolveGroupConfigPath(environment)); + } + + @Override + public String toString() { + return "restrictedTrust=[" + groupConfigPath + ']'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + RestrictedTrustConfig that = (RestrictedTrustConfig) o; + return this.groupConfigPath.equals(that.groupConfigPath) && this.delegate.equals(that.delegate); + } + + @Override + public int hashCode() { + int result = groupConfigPath.hashCode(); + result = 31 * result + delegate.hashCode(); + return result; + } + + private Path resolveGroupConfigPath(@Nullable Environment environment) { + return CertUtils.resolvePath(groupConfigPath, environment); + } + + private CertificateTrustRestrictions readTrustGroup(Path path) throws IOException { + try (InputStream in = 
Files.newInputStream(path)) { + Settings settings = Settings.builder().loadFromStream(path.toString(), in).build(); + final String[] trustNodeNames = settings.getAsArray(RESTRICTIONS_KEY_SUBJECT_NAME); + return new CertificateTrustRestrictions(Arrays.asList(trustNodeNames)); + } + } +} diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/RestrictedTrustManager.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/RestrictedTrustManager.java new file mode 100644 index 00000000000..9d06d280050 --- /dev/null +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/RestrictedTrustManager.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ssl; + +import javax.net.ssl.SSLEngine; +import javax.net.ssl.X509ExtendedTrustManager; +import java.net.Socket; +import java.security.cert.CertificateException; +import java.security.cert.CertificateParsingException; +import java.security.cert.X509Certificate; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.bouncycastle.asn1.ASN1ObjectIdentifier; +import org.bouncycastle.asn1.ASN1Sequence; +import org.bouncycastle.asn1.ASN1TaggedObject; +import org.bouncycastle.asn1.DERTaggedObject; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; + +/** + * An X509 trust manager that only trusts connections from a restricted set of predefined network entities (nodes, clients, etc). + * The trusted entities are defined as a list of predicates on {@link CertificateTrustRestrictions} that are applied to the + * common-names of the certificate. + * The common-names are read as subject-alternative-names with type 'Other' and a 'cn' OID. + * The underlying certificate validation is delegated to another TrustManager. 
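// ---------------------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this PR): the encode/decode pair behind the
// non-standard "otherName + cn OID" subject alternative names that this trust manager consumes.
// CertUtils.createCommonName (added above) is the encode side; readCommonNames (below) is the
// decode side. Assumes BouncyCastle on the classpath; the CommonNameSanSketch class name is
// hypothetical.
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERIA5String;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.x509.GeneralName;

class CommonNameSanSketch {

    static final String CN_OID = "2.5.4.3";

    // Encode: wrap a common name in an otherName SAN entry, as CertUtils.createCommonName does.
    static GeneralName encode(String cn) {
        ASN1Encodable[] seq = { new ASN1ObjectIdentifier(CN_OID), new DERIA5String(cn) };
        return new GeneralName(GeneralName.otherName, new DERSequence(seq));
    }

    // Decode (simplified, in-memory): read the IA5 string back out. A value parsed from an actual
    // certificate usually arrives wrapped in an ASN.1 tagged object, which is why readCommonNames
    // below unwraps a DERTaggedObject before reading the name.
    static String decode(GeneralName name) {
        ASN1Sequence seq = ASN1Sequence.getInstance(name.getName());
        String oid = ASN1ObjectIdentifier.getInstance(seq.getObjectAt(0)).getId();
        return CN_OID.equals(oid) ? DERIA5String.getInstance(seq.getObjectAt(1)).getString() : null;
    }
}
// Example: decode(encode("node01.internal")) returns "node01.internal".
// ---------------------------------------------------------------------------------------------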
+ */ +public final class RestrictedTrustManager extends X509ExtendedTrustManager { + + private final Logger logger; + private final X509ExtendedTrustManager delegate; + private final CertificateTrustRestrictions trustRestrictions; + private final int SAN_CODE_OTHERNAME = 0; + + public RestrictedTrustManager(Settings settings, X509ExtendedTrustManager delegate, CertificateTrustRestrictions restrictions) { + this.logger = Loggers.getLogger(getClass(), settings); + this.delegate = delegate; + this.trustRestrictions = restrictions; + logger.debug("Configured with trust restrictions: [{}]", restrictions); + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType, Socket socket) throws CertificateException { + delegate.checkClientTrusted(chain, authType, socket); + verifyTrust(chain); + } + + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType, Socket socket) throws CertificateException { + delegate.checkServerTrusted(chain, authType, socket); + verifyTrust(chain); + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType, SSLEngine engine) throws CertificateException { + delegate.checkClientTrusted(chain, authType, engine); + verifyTrust(chain); + } + + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType, SSLEngine engine) throws CertificateException { + delegate.checkServerTrusted(chain, authType, engine); + verifyTrust(chain); + } + + @Override + public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { + delegate.checkClientTrusted(chain, authType); + verifyTrust(chain); + } + + @Override + public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { + delegate.checkServerTrusted(chain, authType); + verifyTrust(chain); + } + + @Override + public X509Certificate[] getAcceptedIssuers() { + return delegate.getAcceptedIssuers(); + } + + private void verifyTrust(X509Certificate[] chain) throws CertificateException { + if (chain.length == 0) { + throw new CertificateException("No certificate presented"); + } + final X509Certificate certificate = chain[0]; + Set names = readCommonNames(certificate); + if (verifyCertificateNames(names)) { + logger.debug(() -> new ParameterizedMessage("Trusting certificate [{}] [{}] with common-names [{}]", + certificate.getSubjectDN(), certificate.getSerialNumber().toString(16), names)); + } else { + logger.info("Rejecting certificate [{}] [{}] with common-names [{}]", + certificate.getSubjectDN(), certificate.getSerialNumber().toString(16), names); + throw new CertificateException("Certificate for " + certificate.getSubjectDN() + + " with common-names " + names + + " does not match the trusted names " + trustRestrictions.getTrustedNames()); + } + } + + private boolean verifyCertificateNames(Set names) { + for (Predicate trust : trustRestrictions.getTrustedNames()) { + final Optional match = names.stream().filter(trust).findFirst(); + if (match.isPresent()) { + logger.debug("Name [{}] matches trusted pattern [{}]", match.get(), trust); + return true; + } + } + return false; + } + + private Set readCommonNames(X509Certificate certificate) throws CertificateParsingException { + return getSubjectAlternativeNames(certificate).stream() + .filter(pair -> ((Integer) pair.get(0)).intValue() == SAN_CODE_OTHERNAME) + .map(pair -> pair.get(1)) + .map(value -> { + ASN1Sequence seq = ASN1Sequence.getInstance(value); + assert seq.size() == 2 : "Incorrect sequence 
length for 'other name'"; + final String id = ASN1ObjectIdentifier.getInstance(seq.getObjectAt(0)).getId(); + if (CertUtils.CN_OID.equals(id)) { + final ASN1TaggedObject object = DERTaggedObject.getInstance(seq.getObjectAt(1)); + final String cn = object.getObject().toString(); + logger.trace("Read cn [{}] from ASN1Sequence [{}]", cn, seq); + return cn; + } else { + logger.debug("Certificate [{}] has 'otherName' [{}] with unsupported object-id [{}]", + certificate.getSubjectDN(), seq, id); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + } + + + private Collection> getSubjectAlternativeNames(X509Certificate certificate) throws CertificateParsingException { + final Collection> sans = certificate.getSubjectAlternativeNames(); + logger.trace("Certificate [{}] has subject alternative names [{}]", certificate.getSubjectDN(), sans); + return sans == null ? Collections.emptyList() : sans; + } +} + diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfiguration.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfiguration.java index dce50294683..b81aa8531b2 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfiguration.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfiguration.java @@ -5,7 +5,9 @@ */ package org.elasticsearch.xpack.ssl; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -39,6 +41,7 @@ public final class SSLConfiguration { /** * Creates a new SSLConfiguration from the given settings. There is no fallback configuration when invoking this constructor so * un-configured aspects will take on their default values. + * * @param settings the SSL specific settings; only the settings under a *.ssl. prefix */ SSLConfiguration(Settings settings) { @@ -53,7 +56,8 @@ public final class SSLConfiguration { /** * Creates a new SSLConfiguration from the given settings and global/default SSLConfiguration. If the settings do not contain a value * for a given aspect, the value from the global configuration will be used. - * @param settings the SSL specific settings; only the settings under a *.ssl. prefix + * + * @param settings the SSL specific settings; only the settings under a *.ssl. 
prefix * @param globalSSLConfiguration the default configuration that is used as a fallback */ SSLConfiguration(Settings settings, SSLConfiguration globalSSLConfiguration) { @@ -213,7 +217,15 @@ public final class SSLConfiguration { } private static TrustConfig createTrustConfig(Settings settings, KeyConfig keyConfig, SSLConfiguration global) { + final TrustConfig trustConfig = createCertChainTrustConfig(settings, keyConfig, global); + return SETTINGS_PARSER.trustRestrictionsPath.get(settings) + .map(path -> (TrustConfig) new RestrictedTrustConfig(settings, path, trustConfig)) + .orElse(trustConfig); + } + + private static TrustConfig createCertChainTrustConfig(Settings settings, KeyConfig keyConfig, SSLConfiguration global) { String trustStorePath = SETTINGS_PARSER.truststorePath.get(settings).orElse(null); + List caPaths = getListOrNull(SETTINGS_PARSER.caPaths, settings); if (trustStorePath != null && caPaths != null) { throw new IllegalArgumentException("you cannot specify a truststore and ca files"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java index 492bda6bea1..54a3786c4dc 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLConfigurationSettings.java @@ -36,6 +36,7 @@ public class SSLConfigurationSettings { public final Setting> truststorePath; public final Setting truststorePassword; public final Setting truststoreAlgorithm; + public final Setting> trustRestrictionsPath; public final Setting> keyPath; public final Setting keyPassword; public final Setting> cert; @@ -120,6 +121,11 @@ public class SSLConfigurationSettings { public static final Setting TRUST_STORE_ALGORITHM_PROFILES = Setting.affixKeySetting("transport.profiles.", "xpack.security.ssl.truststore.algorithm", TRUST_STORE_ALGORITHM_TEMPLATE); + private static final Function>> TRUST_RESTRICTIONS_TEMPLATE = key -> new Setting<>(key, s -> null, + Optional::ofNullable, Property.NodeScope, Property.Filtered); + public static final Setting> TRUST_RESTRICTIONS_PROFILES = Setting.affixKeySetting("transport.profiles.", + "xpack.security.ssl.trust_restrictions", TRUST_RESTRICTIONS_TEMPLATE); + private static final Function> LEGACY_KEY_PASSWORD_TEMPLATE = key -> new Setting<>(key, "", SecureString::new, Property.Deprecated, Property.Filtered, Property.NodeScope); public static final Setting LEGACY_KEY_PASSWORD_PROFILES = Setting.affixKeySetting("transport.profiles.", @@ -173,6 +179,7 @@ public class SSLConfigurationSettings { truststorePassword = TRUSTSTORE_PASSWORD_TEMPLATE.apply(prefix + "truststore.secure_password"); keystoreAlgorithm = KEY_STORE_ALGORITHM_TEMPLATE.apply(prefix + "keystore.algorithm"); truststoreAlgorithm = TRUST_STORE_ALGORITHM_TEMPLATE.apply(prefix + "truststore.algorithm"); + trustRestrictionsPath = TRUST_RESTRICTIONS_TEMPLATE.apply(prefix + "trust_restrictions.path"); keyPath = KEY_PATH_TEMPLATE.apply(prefix + "key"); legacyKeyPassword = LEGACY_KEY_PASSWORD_TEMPLATE.apply(prefix + "key_passphrase"); keyPassword = KEY_PASSWORD_TEMPLATE.apply(prefix + "secure_key_passphrase"); @@ -181,9 +188,11 @@ public class SSLConfigurationSettings { clientAuth = CLIENT_AUTH_SETTING_TEMPLATE.apply(prefix + "client_authentication"); verificationMode = VERIFICATION_MODE_SETTING_TEMPLATE.apply(prefix + "verification_mode"); - this.allSettings = Arrays.asList(ciphers, supportedProtocols, keystorePath, 
keystorePassword, keystoreAlgorithm, - keystoreKeyPassword, truststorePath, truststorePassword, truststoreAlgorithm, keyPath, keyPassword, cert, caPaths, - clientAuth, verificationMode, legacyKeystorePassword, legacyKeystoreKeyPassword, legacyKeyPassword, legacyTruststorePassword); + this.allSettings = Arrays.asList(ciphers, supportedProtocols, + keystorePath, keystorePassword, keystoreAlgorithm, keystoreKeyPassword, + truststorePath, truststorePassword, truststoreAlgorithm, trustRestrictionsPath, + keyPath, keyPassword, cert, caPaths, clientAuth, verificationMode, + legacyKeystorePassword, legacyKeystoreKeyPassword, legacyKeyPassword, legacyTruststorePassword); } public List> getAllSettings() { @@ -213,8 +222,8 @@ public class SSLConfigurationSettings { return Arrays.asList(CIPHERS_SETTING_PROFILES, SUPPORTED_PROTOCOLS_PROFILES, KEYSTORE_PATH_PROFILES, LEGACY_KEYSTORE_PASSWORD_PROFILES, KEYSTORE_PASSWORD_PROFILES, LEGACY_KEYSTORE_KEY_PASSWORD_PROFILES, KEYSTORE_KEY_PASSWORD_PROFILES, TRUST_STORE_PATH_PROFILES, LEGACY_TRUSTSTORE_PASSWORD_PROFILES, - TRUSTSTORE_PASSWORD_PROFILES, KEY_STORE_ALGORITHM_PROFILES, TRUST_STORE_ALGORITHM_PROFILES,KEY_PATH_PROFILES, - LEGACY_KEY_PASSWORD_PROFILES, KEY_PASSWORD_PROFILES,CERT_PROFILES,CAPATH_SETTING_PROFILES, + TRUSTSTORE_PASSWORD_PROFILES, KEY_STORE_ALGORITHM_PROFILES, TRUST_STORE_ALGORITHM_PROFILES, TRUST_RESTRICTIONS_PROFILES, + KEY_PATH_PROFILES, LEGACY_KEY_PASSWORD_PROFILES, KEY_PASSWORD_PROFILES,CERT_PROFILES,CAPATH_SETTING_PROFILES, CLIENT_AUTH_SETTING_PROFILES, VERIFICATION_MODE_SETTING_PROFILES); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java index 99cc36d1d02..2015b101065 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/ssl/SSLService.java @@ -451,54 +451,8 @@ public class SSLService extends AbstractComponent { // if no key is provided for transport we can auto-generate a key with a signed certificate for development use only. There is a // bootstrap check that prevents this configuration from being use in production (SSLBootstrapCheck) if (transportSSLConfiguration.keyConfig() == KeyConfig.NONE) { - // lazily generate key to avoid slowing down startup where we do not need it - final GeneratedKeyConfig generatedKeyConfig = new GeneratedKeyConfig(settings); - final TrustConfig trustConfig = - new TrustConfig.CombiningTrustConfig(Arrays.asList(transportSSLConfiguration.trustConfig(), new TrustConfig() { - @Override - X509ExtendedTrustManager createTrustManager(@Nullable Environment environment) { - return generatedKeyConfig.createTrustManager(environment); - } + createDevelopmentTLSConfiguration(sslConfigurations, transportSSLConfiguration, profileSettings); - @Override - List filesToMonitor(@Nullable Environment environment) { - return Collections.emptyList(); - } - - @Override - public String toString() { - return "Generated Trust Config. 
DO NOT USE IN PRODUCTION"; - } - - @Override - public boolean equals(Object o) { - return this == o; - } - - @Override - public int hashCode() { - return System.identityHashCode(this); - } - })); - X509ExtendedTrustManager extendedTrustManager = trustConfig.createTrustManager(env); - ReloadableTrustManager trustManager = new ReloadableTrustManager(extendedTrustManager, trustConfig); - ReloadableX509KeyManager keyManager = - new ReloadableX509KeyManager(generatedKeyConfig.createKeyManager(env), generatedKeyConfig); - sslConfigurations.put(transportSSLConfiguration, createSslContext(keyManager, trustManager, transportSSLConfiguration)); - profileSettings.forEach((profileSetting) -> { - SSLConfiguration configuration = new SSLConfiguration(profileSetting, transportSSLConfiguration); - if (configuration.keyConfig() == KeyConfig.NONE) { - sslConfigurations.compute(configuration, (conf, holder) -> { - if (holder != null && holder.keyManager == keyManager && holder.trustManager == trustManager) { - return holder; - } else { - return createSslContext(keyManager, trustManager, configuration); - } - }); - } else { - sslConfigurations.computeIfAbsent(configuration, this::createSslContext); - } - }); } else { sslConfigurations.computeIfAbsent(transportSSLConfiguration, this::createSslContext); profileSettings.forEach((profileSetting) -> @@ -507,6 +461,60 @@ public class SSLService extends AbstractComponent { return Collections.unmodifiableMap(sslConfigurations); } + private void createDevelopmentTLSConfiguration(Map sslConfigurations, + SSLConfiguration transportSSLConfiguration, List profileSettings) + throws NoSuchAlgorithmException, IOException, CertificateException, OperatorCreationException, UnrecoverableKeyException, + KeyStoreException { + // lazily generate key to avoid slowing down startup where we do not need it + final GeneratedKeyConfig generatedKeyConfig = new GeneratedKeyConfig(settings); + final TrustConfig trustConfig = + new TrustConfig.CombiningTrustConfig(Arrays.asList(transportSSLConfiguration.trustConfig(), new TrustConfig() { + @Override + X509ExtendedTrustManager createTrustManager(@Nullable Environment environment) { + return generatedKeyConfig.createTrustManager(environment); + } + + @Override + List filesToMonitor(@Nullable Environment environment) { + return Collections.emptyList(); + } + + @Override + public String toString() { + return "Generated Trust Config. 
DO NOT USE IN PRODUCTION"; + } + + @Override + public boolean equals(Object o) { + return this == o; + } + + @Override + public int hashCode() { + return System.identityHashCode(this); + } + })); + X509ExtendedTrustManager extendedTrustManager = trustConfig.createTrustManager(env); + ReloadableTrustManager trustManager = new ReloadableTrustManager(extendedTrustManager, trustConfig); + ReloadableX509KeyManager keyManager = + new ReloadableX509KeyManager(generatedKeyConfig.createKeyManager(env), generatedKeyConfig); + sslConfigurations.put(transportSSLConfiguration, createSslContext(keyManager, trustManager, transportSSLConfiguration)); + profileSettings.forEach((profileSetting) -> { + SSLConfiguration configuration = new SSLConfiguration(profileSetting, transportSSLConfiguration); + if (configuration.keyConfig() == KeyConfig.NONE) { + sslConfigurations.compute(configuration, (conf, holder) -> { + if (holder != null && holder.keyManager == keyManager && holder.trustManager == trustManager) { + return holder; + } else { + return createSslContext(keyManager, trustManager, configuration); + } + }); + } else { + sslConfigurations.computeIfAbsent(configuration, this::createSslContext); + } + }); + } + /** * This socket factory wraps an existing SSLSocketFactory and sets the protocols and ciphers on each SSLSocket after it is created. This * is needed even though the SSLContext is configured properly as the configuration does not flow down to the sockets created by the diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java index 06bcb9ab4eb..7a9f88e842f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java @@ -14,12 +14,12 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.script.Script; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; -import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.BiFunction; import java.util.function.Consumer; +import java.util.function.Function; /** * Generic upgrade check applicable to all indices to be upgraded from the current version @@ -35,7 +35,7 @@ public class IndexUpgradeCheck extends AbstractComponent { public static final int UPRADE_VERSION = 6; private final String name; - private final BiFunction, UpgradeActionRequired> actionRequired; + private final Function actionRequired; private final InternalIndexReindexer reindexer; /** @@ -50,7 +50,7 @@ public class IndexUpgradeCheck extends AbstractComponent { * @param updateScript - the upgrade script that should be used during reindexing */ public IndexUpgradeCheck(String name, Settings settings, - BiFunction, UpgradeActionRequired> actionRequired, + Function actionRequired, Client client, ClusterService clusterService, String[] types, Script updateScript) { this(name, settings, actionRequired, client, clusterService, types, updateScript, listener -> listener.onResponse(null), (t, listener) -> listener.onResponse(TransportResponse.Empty.INSTANCE)); @@ -70,7 +70,7 @@ public class IndexUpgradeCheck extends AbstractComponent { * @param postUpgrade - action that should be performed after upgrade */ public IndexUpgradeCheck(String name, Settings settings, - BiFunction, 
UpgradeActionRequired> actionRequired, + Function actionRequired, Client client, ClusterService clusterService, String[] types, Script updateScript, Consumer> preUpgrade, BiConsumer> postUpgrade) { @@ -92,22 +92,22 @@ public class IndexUpgradeCheck extends AbstractComponent { * This method is called by Upgrade API to verify if upgrade or reindex for this index is required * * @param indexMetaData index metadata - * @param params additional user-specified parameters see {@link IndexUpgradeCheckFactory#supportedParams} * @return required action or UpgradeActionRequired.NOT_APPLICABLE if this check cannot be performed on the index */ - public UpgradeActionRequired actionRequired(IndexMetaData indexMetaData, Map params) { - return actionRequired.apply(indexMetaData, params); + public UpgradeActionRequired actionRequired(IndexMetaData indexMetaData) { + return actionRequired.apply(indexMetaData); } /** * Perform the index upgrade * + * @param task the task that executes the upgrade operation * @param indexMetaData index metadata * @param state current cluster state * @param listener the listener that should be called upon completion of the upgrade */ - public void upgrade(IndexMetaData indexMetaData, ClusterState state, + public void upgrade(TaskId task, IndexMetaData indexMetaData, ClusterState state, ActionListener listener) { - reindexer.upgrade(indexMetaData.getIndex().getName(), state, listener); + reindexer.upgrade(task, indexMetaData.getIndex().getName(), state, listener); } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java index 59bda844ae5..491fb39214b 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.tasks.TaskId; import java.util.HashMap; import java.util.List; @@ -39,18 +40,16 @@ public class IndexUpgradeService extends AbstractComponent { * * @param indices list of indices to check, specify _all for all indices * @param options wild card resolution option - * @param params list of additional parameters that will be passed to upgrade checks * @param state the current cluster state * @return a list of indices that should be upgraded/reindexed */ - public Map upgradeInfo(String[] indices, IndicesOptions options, Map params, - ClusterState state) { + public Map upgradeInfo(String[] indices, IndicesOptions options, ClusterState state) { Map results = new HashMap<>(); String[] concreteIndexNames = indexNameExpressionResolver.concreteIndexNames(state, options, indices); MetaData metaData = state.getMetaData(); for (String index : concreteIndexNames) { IndexMetaData indexMetaData = metaData.index(index); - UpgradeActionRequired upgradeActionRequired = upgradeInfo(indexMetaData, index, params); + UpgradeActionRequired upgradeActionRequired = upgradeInfo(indexMetaData, index); if (upgradeActionRequired != null) { results.put(index, upgradeActionRequired); } @@ -58,9 +57,9 @@ public class IndexUpgradeService extends AbstractComponent { return results; } - private UpgradeActionRequired upgradeInfo(IndexMetaData indexMetaData, String index, Map params) { + private 
UpgradeActionRequired upgradeInfo(IndexMetaData indexMetaData, String index) { for (IndexUpgradeCheck check : upgradeChecks) { - UpgradeActionRequired upgradeActionRequired = check.actionRequired(indexMetaData, params); + UpgradeActionRequired upgradeActionRequired = check.actionRequired(indexMetaData); logger.trace("[{}] check [{}] returned [{}]", index, check.getName(), upgradeActionRequired); switch (upgradeActionRequired) { case UPGRADE: @@ -87,18 +86,17 @@ public class IndexUpgradeService extends AbstractComponent { } } - public void upgrade(String index, Map params, ClusterState state, - ActionListener listener) { + public void upgrade(TaskId task, String index, ClusterState state, ActionListener listener) { IndexMetaData indexMetaData = state.metaData().index(index); if (indexMetaData == null) { throw new IndexNotFoundException(index); } for (IndexUpgradeCheck check : upgradeChecks) { - UpgradeActionRequired upgradeActionRequired = check.actionRequired(indexMetaData, params); + UpgradeActionRequired upgradeActionRequired = check.actionRequired(indexMetaData); switch (upgradeActionRequired) { case UPGRADE: // this index needs to be upgraded - start the upgrade procedure - check.upgrade(indexMetaData, state, listener); + check.upgrade(task, indexMetaData, state, listener); return; case REINDEX: // this index needs to be re-indexed diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java index d475a8626e6..243cb4d5fda 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexer.java @@ -5,12 +5,12 @@ */ package org.elasticsearch.xpack.upgrade; -import com.carrotsearch.hppc.procedures.ObjectProcedure; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -23,6 +23,7 @@ import org.elasticsearch.index.reindex.BulkByScrollResponse; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.script.Script; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; import java.util.function.BiConsumer; @@ -59,9 +60,10 @@ public class InternalIndexReindexer { this.postUpgrade = postUpgrade; } - public void upgrade(String index, ClusterState clusterState, ActionListener listener) { + public void upgrade(TaskId task, String index, ClusterState clusterState, ActionListener listener) { + ParentTaskAssigningClient parentAwareClient = new ParentTaskAssigningClient(client, task); preUpgrade.accept(ActionListener.wrap( - t -> innerUpgrade(index, clusterState, ActionListener.wrap( + t -> innerUpgrade(parentAwareClient, index, clusterState, ActionListener.wrap( response -> postUpgrade.accept(t, ActionListener.wrap( empty -> listener.onResponse(response), listener::onFailure @@ -71,22 +73,23 @@ public class InternalIndexReindexer { listener::onFailure)); } - private void innerUpgrade(String index, ClusterState clusterState, 
ActionListener listener) { + private void innerUpgrade(ParentTaskAssigningClient parentAwareClient, String index, ClusterState clusterState, + ActionListener listener) { String newIndex = index + "_v" + version; try { checkMasterAndDataNodeVersion(clusterState); - client.admin().indices().prepareCreate(newIndex).execute(ActionListener.wrap(createIndexResponse -> + parentAwareClient.admin().indices().prepareCreate(newIndex).execute(ActionListener.wrap(createIndexResponse -> setReadOnlyBlock(index, ActionListener.wrap(setReadOnlyResponse -> - reindex(index, newIndex, ActionListener.wrap( + reindex(parentAwareClient, index, newIndex, ActionListener.wrap( bulkByScrollResponse -> // Successful completion of reindexing - delete old index - removeReadOnlyBlock(index, ActionListener.wrap(unsetReadOnlyResponse -> - client.admin().indices().prepareAliases().removeIndex(index) + removeReadOnlyBlock(parentAwareClient, index, ActionListener.wrap(unsetReadOnlyResponse -> + parentAwareClient.admin().indices().prepareAliases().removeIndex(index) .addAlias(newIndex, index).execute(ActionListener.wrap(deleteIndexResponse -> listener.onResponse(bulkByScrollResponse), listener::onFailure )), listener::onFailure )), e -> // Something went wrong during reindexing - remove readonly flag and report the error - removeReadOnlyBlock(index, ActionListener.wrap(unsetReadOnlyResponse -> { + removeReadOnlyBlock(parentAwareClient, index, ActionListener.wrap(unsetReadOnlyResponse -> { listener.onFailure(e); }, e1 -> { listener.onFailure(e); @@ -105,19 +108,21 @@ public class InternalIndexReindexer { } } - private void removeReadOnlyBlock(String index, ActionListener listener) { + private void removeReadOnlyBlock(ParentTaskAssigningClient parentAwareClient, String index, + ActionListener listener) { Settings settings = Settings.builder().put(IndexMetaData.INDEX_READ_ONLY_SETTING.getKey(), false).build(); - client.admin().indices().prepareUpdateSettings(index).setSettings(settings).execute(listener); + parentAwareClient.admin().indices().prepareUpdateSettings(index).setSettings(settings).execute(listener); } - private void reindex(String index, String newIndex, ActionListener listener) { + private void reindex(ParentTaskAssigningClient parentAwareClient, String index, String newIndex, + ActionListener listener) { SearchRequest sourceRequest = new SearchRequest(index); sourceRequest.types(types); IndexRequest destinationRequest = new IndexRequest(newIndex); ReindexRequest reindexRequest = new ReindexRequest(sourceRequest, destinationRequest); reindexRequest.setRefresh(true); reindexRequest.setScript(transformScript); - client.execute(ReindexAction.INSTANCE, reindexRequest, listener); + parentAwareClient.execute(ReindexAction.INSTANCE, reindexRequest, listener); } /** diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index ffa7c011d6c..c1e01c8a885 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -5,17 +5,18 @@ */ package org.elasticsearch.xpack.upgrade; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; +import 
org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; @@ -45,30 +46,25 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.function.BiFunction; import java.util.function.Supplier; public class Upgrade implements ActionPlugin { - public static final Version UPGRADE_INTRODUCED = Version.V_5_5_0; // TODO: Probably will need to change this to 5.6.0 + public static final Version UPGRADE_INTRODUCED = Version.V_5_6_0; + + // this is the required index.format setting for 6.0 services (watcher and security) to start up + // this index setting is set by the upgrade API or automatically when a 6.0 index template is created + private static final int EXPECTED_INDEX_FORMAT_VERSION = 6; private final Settings settings; private final List> upgradeCheckFactories; - private final Set extraParameters; public Upgrade(Settings settings) { this.settings = settings; - this.extraParameters = new HashSet<>(); this.upgradeCheckFactories = new ArrayList<>(); - for (Tuple, BiFunction> checkFactory : Arrays.asList( - getKibanaUpgradeCheckFactory(settings), - getWatcherUpgradeCheckFactory(settings))) { - extraParameters.addAll(checkFactory.v1()); - upgradeCheckFactories.add(checkFactory.v2()); - } + upgradeCheckFactories.add(getWatcherUpgradeCheckFactory(settings)); } public Collection createComponents(InternalClient internalClient, ClusterService clusterService, ThreadPool threadPool, @@ -95,80 +91,65 @@ public class Upgrade implements ActionPlugin { IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { return Arrays.asList( - new RestIndexUpgradeInfoAction(settings, restController, extraParameters), - new RestIndexUpgradeAction(settings, restController, extraParameters) + new RestIndexUpgradeInfoAction(settings, restController), + new RestIndexUpgradeAction(settings, restController) ); } - static Tuple, BiFunction> getKibanaUpgradeCheckFactory( - Settings settings) { - return new Tuple<>( - Collections.singletonList("kibana_indices"), - (internalClient, clusterService) -> - new IndexUpgradeCheck("kibana", - settings, - (indexMetaData, params) -> { - String indexName = indexMetaData.getIndex().getName(); - String kibanaIndicesMasks = params.getOrDefault("kibana_indices", ".kibana"); - String[] kibanaIndices = Strings.delimitedListToStringArray(kibanaIndicesMasks, ","); - if (Regex.simpleMatch(kibanaIndices, indexName)) { - return UpgradeActionRequired.UPGRADE; - } else { - return UpgradeActionRequired.NOT_APPLICABLE; - } - }, internalClient, - clusterService, - Strings.EMPTY_ARRAY, - new Script(ScriptType.INLINE, "painless", "ctx._id = ctx._type + \"-\" + ctx._id;\n" + - "ctx._source = [ ctx._type : ctx._source ];\n" + - "ctx._source.type = ctx._type;\n" + - "ctx._type = \"doc\";", - new HashMap<>()))); + /** + * Checks the format of 
an internal index and returns true if the index is up to date or false if upgrade is required + */ + public static boolean checkInternalIndexFormat(IndexMetaData indexMetaData) { + return indexMetaData.getSettings().getAsInt(IndexMetaData.INDEX_FORMAT_SETTING.getKey(), 0) == EXPECTED_INDEX_FORMAT_VERSION; } - static Tuple, BiFunction> getWatcherUpgradeCheckFactory( - Settings settings) { - return new Tuple<>( - Collections.emptyList(), - (internalClient, clusterService) -> - new IndexUpgradeCheck("watcher", - settings, - (indexMetaData, params) -> { - if (".watches".equals(indexMetaData.getIndex().getName()) || - indexMetaData.getAliases().containsKey(".watches")) { - if (indexMetaData.getMappings().size() == 1 && indexMetaData.getMappings().containsKey("doc") ) { - return UpgradeActionRequired.UP_TO_DATE; - } else { - return UpgradeActionRequired.UPGRADE; - } - } else { - return UpgradeActionRequired.NOT_APPLICABLE; - } - }, internalClient, - clusterService, - new String[]{"watch"}, - new Script(ScriptType.INLINE, "painless", "ctx._type = \"doc\";\n" + - "if (ctx._source.containsKey(\"_status\") && !ctx._source.containsKey(\"status\") ) {}\n" + - " ctx._source.status = ctx._source.remove(\"_status\");\n" + - "}", - new HashMap<>()), - booleanActionListener -> preWatcherUpgrade(internalClient, booleanActionListener), - (shouldStartWatcher, listener) -> postWatcherUpgrade(internalClient, shouldStartWatcher, listener) - )); + static BiFunction getWatcherUpgradeCheckFactory(Settings settings) { + return (internalClient, clusterService) -> + new IndexUpgradeCheck("watcher", + settings, + indexMetaData -> { + if (".watches".equals(indexMetaData.getIndex().getName()) || + indexMetaData.getAliases().containsKey(".watches")) { + if (checkInternalIndexFormat(indexMetaData)) { + return UpgradeActionRequired.UP_TO_DATE; + } else { + return UpgradeActionRequired.UPGRADE; + } + } else { + return UpgradeActionRequired.NOT_APPLICABLE; + } + }, internalClient, + clusterService, + new String[]{"watch"}, + new Script(ScriptType.INLINE, "painless", "ctx._type = \"doc\";\n" + + "if (ctx._source.containsKey(\"_status\") && !ctx._source.containsKey(\"status\") ) {\n" + + " ctx._source.status = ctx._source.remove(\"_status\");\n" + + "}", + new HashMap<>()), + booleanActionListener -> preWatcherUpgrade(internalClient, booleanActionListener), + (shouldStartWatcher, listener) -> postWatcherUpgrade(internalClient, shouldStartWatcher, listener) + ); } private static void preWatcherUpgrade(Client client, ActionListener listener) { + ActionListener triggeredWatchIndexTemplateListener = deleteIndexTemplateListener("triggered_watches", + listener, () -> listener.onResponse(true)); + + ActionListener watchIndexTemplateListener = deleteIndexTemplateListener("watches", listener, + () -> client.admin().indices().prepareDeleteTemplate("triggered_watches").execute(triggeredWatchIndexTemplateListener)); + new WatcherClient(client).watcherStats(new WatcherStatsRequest(), ActionListener.wrap( stats -> { if (stats.watcherMetaData().manuallyStopped()) { - // don't start the watcher after upgrade + // don't start watcher after upgrade listener.onResponse(false); } else { - // stop the watcher + // stop watcher new WatcherClient(client).watcherService(new WatcherServiceRequest().stop(), ActionListener.wrap( stopResponse -> { if (stopResponse.isAcknowledged()) { - listener.onResponse(true); + // delete old templates before indexing + client.admin().indices().prepareDeleteTemplate("watches").execute(watchIndexTemplateListener); } else 
{ listener.onFailure(new IllegalStateException("unable to stop watcher service")); } @@ -179,16 +160,27 @@ public class Upgrade implements ActionPlugin { } private static void postWatcherUpgrade(Client client, Boolean shouldStartWatcher, ActionListener listener) { - client.admin().indices().prepareDelete("triggered-watches").execute(ActionListener.wrap(deleteIndexResponse -> { - startWatcherIfNeeded(shouldStartWatcher, client, listener); - }, e -> { - if (e instanceof IndexNotFoundException) { - startWatcherIfNeeded(shouldStartWatcher, client, listener); - } else { - listener.onFailure(e); - } - } - )); + ActionListener deleteTriggeredWatchIndexResponse = ActionListener.wrap(deleteIndexResponse -> + startWatcherIfNeeded(shouldStartWatcher, client, listener), e -> { + if (e instanceof IndexNotFoundException) { + startWatcherIfNeeded(shouldStartWatcher, client, listener); + } else { + listener.onFailure(e); + } + }); + + client.admin().indices().prepareDelete(".triggered_watches").execute(deleteTriggeredWatchIndexResponse); + } + + private static ActionListener deleteIndexTemplateListener(String name, ActionListener listener, + Runnable runnable) { + return ActionListener.wrap(r -> { + if (r.isAcknowledged()) { + runnable.run(); + } else { + listener.onFailure(new ElasticsearchException("Deleting [{}] template was not acknowledged", name)); + } + }, listener::onFailure); } private static void startWatcherIfNeeded(Boolean shouldStartWatcher, Client client, ActionListener listener) { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java index fc2efda6c3a..d7f410b02c7 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeAction.java @@ -25,13 +25,14 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.upgrade.IndexUpgradeService; import java.io.IOException; -import java.util.Collections; -import java.util.Map; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -60,7 +61,6 @@ public class IndexUpgradeAction extends Action implements IndicesRequest { private String index = null; - private Map extraParams = Collections.emptyMap(); // for serialization public Request() { @@ -95,24 +95,12 @@ public class IndexUpgradeAction extends Action extraParams() { - return extraParams; - } - - public Request extraParams(Map extraParams) { - this.extraParams = extraParams; - return this; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (index == null) { validationException = addValidationError("index is missing", validationException); } - if (extraParams == null) { - validationException = addValidationError("params are missing", validationException); - } return validationException; } @@ -120,14 +108,12 @@ public class IndexUpgradeAction extends Action params) { - request.extraParams(params); - return 
this; - } - - } public static class TransportAction extends TransportMasterNodeAction { @@ -195,8 +183,16 @@ public class IndexUpgradeAction extends Action listener) { - indexUpgradeService.upgrade(request.index(), request.extraParams(), state, listener); + protected final void masterOperation(Task task, Request request, ClusterState state, + ActionListener listener) { + TaskId taskId = new TaskId(clusterService.localNode().getId(), task.getId()); + indexUpgradeService.upgrade(taskId, request.index(), state, listener); } + + @Override + protected final void masterOperation(Request request, ClusterState state, ActionListener listener) { + throw new UnsupportedOperationException("the task parameter is required"); + } + } } \ No newline at end of file diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java index 4ba2b001c83..4959950d450 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoAction.java @@ -37,7 +37,6 @@ import org.elasticsearch.xpack.upgrade.UpgradeActionRequired; import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.Map; import java.util.Objects; @@ -125,8 +124,7 @@ public class IndexUpgradeInfoAction extends Action implements IndicesRequest.Replaceable { private String[] indices = null; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); - private Map extraParams = Collections.emptyMap(); + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true); // for serialization public Request() { @@ -157,24 +155,12 @@ public class IndexUpgradeInfoAction extends Action extraParams() { - return extraParams; - } - - public Request extraParams(Map extraParams) { - this.extraParams = extraParams; - return this; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (indices == null) { validationException = addValidationError("index/indices is missing", validationException); } - if (extraParams == null) { - validationException = addValidationError("params are missing", validationException); - } return validationException; } @@ -183,7 +169,6 @@ public class IndexUpgradeInfoAction extends Action params) { - request.extraParams(params); - return this; - } - - } public static class TransportAction extends TransportMasterNodeReadAction { @@ -272,7 +248,7 @@ public class IndexUpgradeInfoAction extends Action listener) { if (licenseState.isUpgradeAllowed()) { Map results = - indexUpgradeService.upgradeInfo(request.indices(), request.indicesOptions(), request.extraParams(), state); + indexUpgradeService.upgradeInfo(request.indices(), request.indicesOptions(), state); listener.onResponse(new Response(results)); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackPlugin.UPGRADE)); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeAction.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeAction.java index 7678c19ac50..ae0b309bf19 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeAction.java @@ -27,15 +27,11 @@ import 
org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction.Request; import java.io.IOException; import java.util.HashMap; import java.util.Map; -import java.util.Set; public class RestIndexUpgradeAction extends BaseRestHandler { - private final Set extraParameters; - - public RestIndexUpgradeAction(Settings settings, RestController controller, Set extraParameters) { + public RestIndexUpgradeAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.POST, "_xpack/migration/upgrade/{index}", this); - this.extraParameters = extraParameters; } @Override @@ -54,14 +50,6 @@ public class RestIndexUpgradeAction extends BaseRestHandler { private RestChannelConsumer handlePost(final RestRequest request, NodeClient client) { Request upgradeRequest = new Request(request.param("index")); - Map extraParamsMap = new HashMap<>(); - for (String param : extraParameters) { - String value = request.param(param); - if (value != null) { - extraParamsMap.put(param, value); - } - } - upgradeRequest.extraParams(extraParamsMap); Map params = new HashMap<>(); params.put(BulkByScrollTask.Status.INCLUDE_CREATED, Boolean.toString(true)); params.put(BulkByScrollTask.Status.INCLUDE_UPDATED, Boolean.toString(true)); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java index 6a900687bc9..74e40d1f279 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java @@ -17,21 +17,15 @@ import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction.Request; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; public class RestIndexUpgradeInfoAction extends BaseRestHandler { - private final Set extraParameters; - public RestIndexUpgradeInfoAction(Settings settings, RestController controller, Set extraParameters) { + public RestIndexUpgradeInfoAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(RestRequest.Method.GET, "/_xpack/migration/assistance", this); controller.registerHandler(RestRequest.Method.GET, "/_xpack/migration/assistance/{index}", this); - this.extraParameters = extraParameters; } - @Override public String getName() { return "xpack_migration_assistance"; @@ -49,14 +43,6 @@ public class RestIndexUpgradeInfoAction extends BaseRestHandler { private RestChannelConsumer handleGet(final RestRequest request, NodeClient client) { Request infoRequest = new Request(Strings.splitStringByCommaToArray(request.param("index"))); infoRequest.indicesOptions(IndicesOptions.fromRequest(request, infoRequest.indicesOptions())); - Map extraParamsMap = new HashMap<>(); - for (String param : extraParameters) { - String value = request.param(param); - if (value != null) { - extraParamsMap.put(param, value); - } - } - infoRequest.extraParams(extraParamsMap); return channel -> client.execute(IndexUpgradeInfoAction.INSTANCE, infoRequest, new RestToXContentListener<>(channel)); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 28b8a60f56a..ab94773f1a4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ 
b/plugin/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -514,9 +514,7 @@ public class Watcher implements ActionPlugin { // These are all old templates from pre 6.0 era, that need to be deleted public UnaryOperator> getIndexTemplateMetaDataUpgrader() { return map -> { - map.keySet().removeIf(name -> "watches".equals(name) || "triggered_watches".equals(name) - || name.startsWith("watch_history_")); - + map.keySet().removeIf(name -> name.startsWith("watch_history_")); return map; }; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index dd874206d4f..8b6f98ef1b4 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -192,18 +192,23 @@ final class WatcherIndexingListener extends AbstractComponent implements Indexin */ @Override public void clusterChanged(ClusterChangedEvent event) { - if (event.state().nodes().getLocalNode().isDataNode() && event.metaDataChanged()) { - try { - IndexMetaData metaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, event.state().metaData()); - if (metaData == null) { + boolean isWatchExecutionDistributed = WatcherLifeCycleService.isWatchExecutionDistributed(event.state()); + if (isWatchExecutionDistributed) { + if (event.state().nodes().getLocalNode().isDataNode() && event.metaDataChanged()) { + try { + IndexMetaData metaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, event.state().metaData()); + if (metaData == null) { + configuration = INACTIVE; + } else { + checkWatchIndexHasChanged(metaData, event); + } + } catch (IllegalStateException e) { + logger.error("error loading watches index: [{}]", e.getMessage()); configuration = INACTIVE; - } else { - checkWatchIndexHasChanged(metaData, event); } - } catch (IllegalStateException e) { - logger.error("error loading watches index: [{}]", e.getMessage()); - configuration = INACTIVE; } + } else { + configuration = INACTIVE; } } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java index 4a32c094d63..9458107c60f 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleService.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher; +import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -19,6 +20,7 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.upgrade.Upgrade; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; import org.elasticsearch.xpack.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils; @@ -34,22 +36,16 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; public class WatcherLifeCycleService extends AbstractComponent implements ClusterStateListener { - // this is the required index.format setting for watcher to start up at all - // this index setting is set by the upgrade API or automatically 
when a 6.0 index template is created - private static final int EXPECTED_INDEX_FORMAT_VERSION = 6; - private final WatcherService watcherService; - private final ClusterService clusterService; private final ExecutorService executor; private AtomicReference> previousAllocationIds = new AtomicReference<>(Collections.emptyList()); private volatile WatcherMetaData watcherMetaData; - public WatcherLifeCycleService(Settings settings, ThreadPool threadPool, ClusterService clusterService, - WatcherService watcherService) { + WatcherLifeCycleService(Settings settings, ThreadPool threadPool, ClusterService clusterService, + WatcherService watcherService) { super(settings); this.executor = threadPool.executor(ThreadPool.Names.GENERIC); this.watcherService = watcherService; - this.clusterService = clusterService; clusterService.addListener(this); // Close if the indices service is being stopped, so we don't run into search failures (locally) that will // happen because we're shutting down and an watch is scheduled. @@ -62,10 +58,6 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste watcherMetaData = new WatcherMetaData(!settings.getAsBoolean("xpack.watcher.start_immediately", true)); } - public void start() { - start(clusterService.state(), true); - } - public void stop(String reason) { watcherService.stop(reason); } @@ -123,60 +115,89 @@ public class WatcherLifeCycleService extends AbstractComponent implements Cluste if (currentWatcherStopped) { executor.execute(() -> this.stop("watcher manually marked to shutdown in cluster state update, shutting down")); } else { - if (watcherService.state() == WatcherState.STARTED && event.state().nodes().getLocalNode().isDataNode()) { - DiscoveryNode localNode = event.state().nodes().getLocalNode(); - RoutingNode routingNode = event.state().getRoutingNodes().node(localNode.getId()); - IndexMetaData watcherIndexMetaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, event.state().metaData()); + // if there are old nodes in the cluster hosting the watch index shards, we cannot run distributed, only on the master node + boolean isDistributedWatchExecutionEnabled = isWatchExecutionDistributed(event.state()); + if (isDistributedWatchExecutionEnabled) { + if (watcherService.state() == WatcherState.STARTED && event.state().nodes().getLocalNode().isDataNode()) { + DiscoveryNode localNode = event.state().nodes().getLocalNode(); + RoutingNode routingNode = event.state().getRoutingNodes().node(localNode.getId()); + IndexMetaData watcherIndexMetaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, event.state().metaData()); - // no watcher index, time to pause, if we currently have shards here - if (watcherIndexMetaData == null) { - if (previousAllocationIds.get().isEmpty() == false) { - previousAllocationIds.set(Collections.emptyList()); - executor.execute(() -> watcherService.pauseExecution("no watcher index found")); + // no watcher index, time to pause, as there are for sure no shards on this node + if (watcherIndexMetaData == null) { + if (previousAllocationIds.get().isEmpty() == false) { + previousAllocationIds.set(Collections.emptyList()); + executor.execute(() -> watcherService.pauseExecution("no watcher index found")); + } + return; } - return; - } - String watchIndex = watcherIndexMetaData.getIndex().getName(); - List localShards = routingNode.shardsWithState(watchIndex, RELOCATING, STARTED); + String watchIndex = watcherIndexMetaData.getIndex().getName(); + List localShards = routingNode.shardsWithState(watchIndex, RELOCATING, 
STARTED); - // no local shards, empty out watcher and not waste resources! - if (localShards.isEmpty()) { - if (previousAllocationIds.get().isEmpty() == false) { - executor.execute(() -> watcherService.pauseExecution("no local watcher shards")); - previousAllocationIds.set(Collections.emptyList()); + // no local shards, empty out watcher and not waste resources! + if (localShards.isEmpty()) { + if (previousAllocationIds.get().isEmpty() == false) { + executor.execute(() -> watcherService.pauseExecution("no local watcher shards")); + previousAllocationIds.set(Collections.emptyList()); + } + return; } - return; - } - List currentAllocationIds = localShards.stream() - .map(ShardRouting::allocationId) - .map(AllocationId::getId) - .collect(Collectors.toList()); - Collections.sort(currentAllocationIds); + List currentAllocationIds = localShards.stream() + .map(ShardRouting::allocationId) + .map(AllocationId::getId) + .collect(Collectors.toList()); + Collections.sort(currentAllocationIds); - if (previousAllocationIds.get().equals(currentAllocationIds) == false) { - previousAllocationIds.set(currentAllocationIds); - executor.execute(() -> watcherService.reload(event.state(), "different shard allocation ids")); + if (previousAllocationIds.get().equals(currentAllocationIds) == false) { + previousAllocationIds.set(currentAllocationIds); + executor.execute(() -> watcherService.reload(event.state(), "different shard allocation ids")); + } + } else if (watcherService.state() != WatcherState.STARTED && watcherService.state() != WatcherState.STARTING) { + IndexMetaData watcherIndexMetaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, event.state().metaData()); + IndexMetaData triggeredWatchesIndexMetaData = WatchStoreUtils.getConcreteIndex(TriggeredWatchStore.INDEX_NAME, + event.state().metaData()); + boolean isIndexInternalFormatWatchIndex = watcherIndexMetaData == null || + Upgrade.checkInternalIndexFormat(watcherIndexMetaData); + boolean isIndexInternalFormatTriggeredWatchIndex = triggeredWatchesIndexMetaData == null || + Upgrade.checkInternalIndexFormat(triggeredWatchesIndexMetaData); + if (isIndexInternalFormatTriggeredWatchIndex && isIndexInternalFormatWatchIndex) { + executor.execute(() -> start(event.state(), false)); + } else { + logger.warn("Not starting watcher, the indices have not been upgraded yet. 
Please run the Upgrade API"); + } } - } else if (watcherService.state() != WatcherState.STARTED && watcherService.state() != WatcherState.STARTING) { - IndexMetaData watcherIndexMetaData = WatchStoreUtils.getConcreteIndex(Watch.INDEX, event.state().metaData()); - IndexMetaData triggeredWatchesIndexMetaData = WatchStoreUtils.getConcreteIndex(TriggeredWatchStore.INDEX_NAME, - event.state().metaData()); - String indexFormatSetting = IndexMetaData.INDEX_FORMAT_SETTING.getKey(); - boolean isIndexInternalFormatWatchIndex = watcherIndexMetaData == null || - watcherIndexMetaData.getSettings().getAsInt(indexFormatSetting, 0) == EXPECTED_INDEX_FORMAT_VERSION; - boolean isIndexInternalFormatTriggeredWatchIndex = triggeredWatchesIndexMetaData == null || - triggeredWatchesIndexMetaData.getSettings().getAsInt(indexFormatSetting, 0) == EXPECTED_INDEX_FORMAT_VERSION; - if (isIndexInternalFormatTriggeredWatchIndex && isIndexInternalFormatWatchIndex) { - executor.execute(() -> start(event.state(), false)); + } else { + if (event.localNodeMaster()) { + if (watcherService.state() != WatcherState.STARTED && watcherService.state() != WatcherState.STARTING) { + executor.execute(() -> start(event.state(), false)); + } } else { - logger.warn("Not starting watcher, the indices have not been upgraded yet. Please run the Upgrade API"); + if (watcherService.state() == WatcherState.STARTED || watcherService.state() == WatcherState.STARTING) { + executor.execute(() -> watcherService.pauseExecution("Pausing watcher, cluster contains old nodes not supporting" + + " distributed watch execution")); + } } } } } + /** + * Checks if the preconditions are given to run watcher with distributed watch execution. + * The following requirements need to be fulfilled + * + * 1. The master node must run on a version greater than or equal 6.0 + * 2. 
The nodes holding the watcher shards must run on a version greater than or equal 6.0 + * + * @param state The cluster to check against + * @return true, if the above requirements are fulfilled, false otherwise + */ + public static boolean isWatchExecutionDistributed(ClusterState state) { + // short circuit if all nodes are on 6.x, should be standard after upgrade + return state.nodes().getMinNodeVersion().onOrAfter(Version.V_6_0_0_beta1); + } + public WatcherMetaData watcherMetaData() { return watcherMetaData; } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java index 5f6eac779df..bf368ea6e36 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java @@ -56,7 +56,6 @@ public class WatcherSearchTemplateRequest implements ToXContentObject { public WatcherSearchTemplateRequest(String[] indices, String[] types, SearchType searchType, IndicesOptions indicesOptions, Script template) { - assert template == null || Script.DEFAULT_TEMPLATE_LANG.equals(template.getLang()); this.indices = indices; this.types = types; this.searchType = searchType; @@ -248,13 +247,6 @@ public class WatcherSearchTemplateRequest implements ToXContentObject { DEFAULT_INDICES_OPTIONS); } else if (TEMPLATE_FIELD.match(currentFieldName)) { template = Script.parse(parser, Script.DEFAULT_TEMPLATE_LANG); - - // for deprecation of stored script namespaces the default lang is ignored, - // so the template lang must be set for a stored script - if (template.getType() == ScriptType.STORED) { - template = new Script( - ScriptType.STORED, Script.DEFAULT_TEMPLATE_LANG, template.getIdOrCode(), template.getParams()); - } } else { throw new ElasticsearchParseException("could not read search request. unexpected object field [" + currentFieldName + "]"); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java index b8fd2591ed2..42396141015 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateService.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.watcher.Watcher; @@ -48,7 +49,8 @@ public class WatcherSearchTemplateService extends AbstractComponent { watcherContextParams.putAll(source.getParams()); } // Templates are always of lang mustache: - Script template = new Script(source.getType(), "mustache", source.getIdOrCode(), source.getOptions(), watcherContextParams); + Script template = new Script(source.getType(), source.getType() == ScriptType.STORED ? 
null : "mustache", + source.getIdOrCode(), source.getOptions(), watcherContextParams); TemplateScript.Factory compiledTemplate = scriptService.compile(template, Watcher.SCRIPT_TEMPLATE_CONTEXT); return compiledTemplate.newInstance(template.getParams()).execute(); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index eed4d64356c..74e80f991ca 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -6,11 +6,16 @@ package org.elasticsearch.xpack.watcher.transport.actions; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; @@ -18,23 +23,56 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.watcher.WatcherLifeCycleService; +import org.elasticsearch.xpack.watcher.watch.Watch; +import org.elasticsearch.xpack.watcher.watch.WatchStoreUtils; import java.util.function.Supplier; -public abstract class WatcherTransportAction - extends HandledTransportAction { +public abstract class WatcherTransportAction, Response extends ActionResponse> + extends TransportMasterNodeAction { protected final XPackLicenseState licenseState; + private final ClusterService clusterService; + private final Supplier response; public WatcherTransportAction(Settings settings, String actionName, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - XPackLicenseState licenseState, Supplier request) { - super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request); + XPackLicenseState licenseState, ClusterService clusterService, Supplier request, + Supplier response) { + super(settings, actionName, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, request); this.licenseState = licenseState; + this.clusterService = clusterService; + this.response = response; + } + + protected String executor() { + return ThreadPool.Names.GENERIC; } @Override - protected void doExecute(Task task, Request request, ActionListener listener) { + protected Response newResponse() { + return response.get(); + } + + protected abstract void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception; + + protected 
boolean localExecute(Request request) { + return WatcherLifeCycleService.isWatchExecutionDistributed(clusterService.state()); + } + + @Override + protected ClusterBlockException checkBlock(Request request, ClusterState state) { + IndexMetaData index = WatchStoreUtils.getConcreteIndex(Watch.INDEX, state.metaData()); + if (index != null) { + return state.blocks().indexBlockedException(ClusterBlockLevel.WRITE, index.getIndex().getName()); + } else { + return state.blocks().globalBlockedException(ClusterBlockLevel.WRITE); + } + } + + @Override + protected void doExecute(Task task, final Request request, ActionListener listener) { if (licenseState.isWatcherAllowed()) { super.doExecute(task, request, listener); } else { diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index a0a8e17d7db..e56d8dcb9ec 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -12,8 +12,10 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -44,16 +46,17 @@ public class TransportAckWatchAction extends WatcherTransportAction listener) { + protected void masterOperation(AckWatchRequest request, ClusterState state, + ActionListener listener) throws Exception { GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()) .preference(Preference.LOCAL.type()).realtime(true); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java index 385e3ad5fa7..2a1cd1e7a40 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/activate/TransportActivateWatchAction.java @@ -12,8 +12,10 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -23,6 +25,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.InternalClient; import 
org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; +import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.watch.WatchStatus; import org.joda.time.DateTime; @@ -42,21 +45,25 @@ public class TransportActivateWatchAction extends WatcherTransportAction listener) { + protected void masterOperation(ActivateWatchRequest request, ClusterState state, ActionListener listener) + throws Exception { + try { DateTime now = new DateTime(clock.millis(), UTC); UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()); @@ -77,6 +84,13 @@ public class TransportActivateWatchAction extends WatcherTransportAction listener) { + protected void masterOperation(ExecuteWatchRequest request, ClusterState state, + ActionListener listener) throws Exception { if (request.getId() != null) { GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()) .preference(Preference.LOCAL.type()).realtime(true); @@ -135,4 +139,5 @@ public class TransportExecuteWatchAction extends WatcherTransportAction { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); + if (in.getVersion().before(Version.V_6_0_0_alpha1)) { + in.readLong(); + in.readByte(); + } id = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); + if (out.getVersion().before(Version.V_6_0_0_alpha1)) { + out.writeLong(1); + out.writeByte(VersionType.INTERNAL.getValue()); + } out.writeString(id); } diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java index 54738872105..d458f5f7493 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java @@ -9,8 +9,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,16 +41,17 @@ public class TransportGetWatchAction extends WatcherTransportAction listener) { + protected void masterOperation(GetWatchRequest request, ClusterState state, + ActionListener listener) throws Exception { GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()) .preference(Preference.LOCAL.type()).realtime(true); diff --git a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java index 732b426f544..e302a0384ac 100644 --- a/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java +++ b/plugin/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/put/TransportPutWatchAction.java @@ -10,7 +10,9 @@ 
import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -22,6 +24,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; +import org.elasticsearch.xpack.watcher.trigger.TriggerService; import org.elasticsearch.xpack.watcher.watch.Payload; import org.elasticsearch.xpack.watcher.watch.Watch; import org.joda.time.DateTime; @@ -36,20 +39,24 @@ public class TransportPutWatchAction extends WatcherTransportAction listener) { + protected void masterOperation(PutWatchRequest request, ClusterState state, + ActionListener listener) throws Exception { try { DateTime now = new DateTime(clock.millis(), UTC); Watch watch = parser.parseWithSecrets(request.getId(), false, request.getSource(), now, request.xContentType()); @@ -66,6 +73,9 @@ public class TransportPutWatchAction extends WatcherTransportAction { boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED; + if (localExecute(request) == false && watch.status().state().isActive()) { + triggerService.add(watch); + } listener.onResponse(new PutWatchResponse(indexResponse.getId(), indexResponse.getVersion(), created)); }, listener::onFailure)); } diff --git a/plugin/src/main/resources/monitoring-alerts.json b/plugin/src/main/resources/monitoring-alerts.json index 5f6f2a4cb67..4f8f8a657f4 100644 --- a/plugin/src/main/resources/monitoring-alerts.json +++ b/plugin/src/main/resources/monitoring-alerts.json @@ -1,6 +1,6 @@ { "index_patterns": ".monitoring-alerts-${monitoring.template.version}", - "version": 6000002, + "version": 6000026, "settings": { "index": { "number_of_shards": 1, diff --git a/plugin/src/main/resources/monitoring-beats.json b/plugin/src/main/resources/monitoring-beats.json index 7283e4e45cd..c98c63979ac 100644 --- a/plugin/src/main/resources/monitoring-beats.json +++ b/plugin/src/main/resources/monitoring-beats.json @@ -1,6 +1,6 @@ { "index_patterns": ".monitoring-beats-${monitoring.template.version}-*", - "version": 6000002, + "version": 6000026, "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 1, diff --git a/plugin/src/main/resources/monitoring-es.json b/plugin/src/main/resources/monitoring-es.json index dddb89c268b..ee88e019acc 100644 --- a/plugin/src/main/resources/monitoring-es.json +++ b/plugin/src/main/resources/monitoring-es.json @@ -1,6 +1,6 @@ { "index_patterns": ".monitoring-es-${monitoring.template.version}-*", - "version": 6000002, + "version": 6000026, "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 1, diff --git a/plugin/src/main/resources/monitoring-kibana.json b/plugin/src/main/resources/monitoring-kibana.json index 66cc418b7eb..eeafc29ed5c 100644 --- a/plugin/src/main/resources/monitoring-kibana.json +++ b/plugin/src/main/resources/monitoring-kibana.json @@ -1,6 +1,6 @@ { "index_patterns": ".monitoring-kibana-${monitoring.template.version}-*", - "version": 6000002, + 
"version": 6000026, "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 1, @@ -77,18 +77,40 @@ } } }, + "cloud": { + "properties": { + "name": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "vm_type": { + "type": "keyword" + }, + "region": { + "type": "keyword" + }, + "zone": { + "type": "keyword" + }, + "metadata": { + "type": "object" + } + } + }, "os": { "properties": { "load": { "properties": { "1m": { - "type": "float" + "type": "half_float" }, "5m": { - "type": "float" + "type": "half_float" }, "15m": { - "type": "float" + "type": "half_float" } } }, diff --git a/plugin/src/main/resources/monitoring-logstash.json b/plugin/src/main/resources/monitoring-logstash.json index 095de0d41ea..9458e07b157 100644 --- a/plugin/src/main/resources/monitoring-logstash.json +++ b/plugin/src/main/resources/monitoring-logstash.json @@ -1,6 +1,6 @@ { "index_patterns": ".monitoring-logstash-${monitoring.template.version}-*", - "version": 6000002, + "version": 6000026, "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 1, diff --git a/plugin/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json b/plugin/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json index d7b362ff8f7..42ed62d8323 100644 --- a/plugin/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json +++ b/plugin/src/main/resources/monitoring/watches/elasticsearch_cluster_status.json @@ -7,7 +7,7 @@ "link": "elasticsearch/indices", "severity": 2100, "type": "monitoring", - "version_created": 6000002, + "version_created": 6000026, "watch": "${monitoring.watch.id}" } }, @@ -43,7 +43,7 @@ "filter": [ { "term": { - "cluster_uuid": "${monitoring.watch.cluster_uuid}" + "cluster_uuid": "{{ctx.metadata.xpack.cluster_uuid}}" } }, { @@ -75,8 +75,7 @@ "search": { "request": { "indices": [ - ".monitoring-alerts-6", - ".monitoring-alerts-2" + ".monitoring-alerts-6" ], "body": { "size": 1, @@ -85,7 +84,7 @@ "bool": { "filter": { "term": { - "_id": "${monitoring.watch.unique_id}" + "_id": "{{ctx.watch_id}}" } } } diff --git a/plugin/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json b/plugin/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json index 0991999476c..242bd116aa0 100644 --- a/plugin/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json +++ b/plugin/src/main/resources/monitoring/watches/elasticsearch_version_mismatch.json @@ -7,7 +7,7 @@ "link": "elasticsearch/nodes", "severity": 1000, "type": "monitoring", - "version_created": 6000002, + "version_created": 6000026, "watch": "${monitoring.watch.id}" } }, @@ -36,7 +36,7 @@ "filter": [ { "term": { - "_id": "${monitoring.watch.cluster_uuid}" + "_id": "{{ctx.metadata.xpack.cluster_uuid}}" } }, { @@ -71,8 +71,7 @@ "search": { "request": { "indices": [ - ".monitoring-alerts-6", - ".monitoring-alerts-2" + ".monitoring-alerts-6" ], "body": { "size": 1, @@ -81,7 +80,7 @@ "bool": { "filter": { "term": { - "_id": "${monitoring.watch.unique_id}" + "_id": "{{ctx.watch_id}}" } } } diff --git a/plugin/src/main/resources/monitoring/watches/kibana_version_mismatch.json b/plugin/src/main/resources/monitoring/watches/kibana_version_mismatch.json index 4f0d036f538..ce425a4a0dd 100644 --- a/plugin/src/main/resources/monitoring/watches/kibana_version_mismatch.json +++ b/plugin/src/main/resources/monitoring/watches/kibana_version_mismatch.json @@ -7,7 +7,7 @@ "link": "kibana/instances", "severity": 1000, "type": "monitoring", - "version_created": 
6000002, + "version_created": 6000026, "watch": "${monitoring.watch.id}" } }, @@ -33,7 +33,7 @@ "filter": [ { "term": { - "cluster_uuid": "${monitoring.watch.cluster_uuid}" + "cluster_uuid": "{{ctx.metadata.xpack.cluster_uuid}}" } }, { @@ -98,8 +98,7 @@ "search": { "request": { "indices": [ - ".monitoring-alerts-6", - ".monitoring-alerts-2" + ".monitoring-alerts-6" ], "body": { "size": 1, @@ -108,7 +107,7 @@ "bool": { "filter": { "term": { - "_id": "${monitoring.watch.unique_id}" + "_id": "{{ctx.watch_id}}" } } } diff --git a/plugin/src/main/resources/monitoring/watches/logstash_version_mismatch.json b/plugin/src/main/resources/monitoring/watches/logstash_version_mismatch.json index cd1007339b0..7e625eb6452 100644 --- a/plugin/src/main/resources/monitoring/watches/logstash_version_mismatch.json +++ b/plugin/src/main/resources/monitoring/watches/logstash_version_mismatch.json @@ -7,7 +7,7 @@ "link": "logstash/instances", "severity": 1000, "type": "monitoring", - "version_created": 6000002, + "version_created": 6000026, "watch": "${monitoring.watch.id}" } }, @@ -33,7 +33,7 @@ "filter": [ { "term": { - "cluster_uuid": "${monitoring.watch.cluster_uuid}" + "cluster_uuid": "{{ctx.metadata.xpack.cluster_uuid}}" } }, { @@ -98,8 +98,7 @@ "search": { "request": { "indices": [ - ".monitoring-alerts-6", - ".monitoring-alerts-2" + ".monitoring-alerts-6" ], "body": { "size": 1, @@ -108,7 +107,7 @@ "bool": { "filter": { "term": { - "_id": "${monitoring.watch.unique_id}" + "_id": "{{ctx.watch_id}}" } } } diff --git a/plugin/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java b/plugin/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java index 026a8e63dbe..42af765dd15 100644 --- a/plugin/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java +++ b/plugin/src/test/java/org/elasticsearch/integration/ClearRealmsCacheTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequest; import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheResponse; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.Realm; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.support.Hasher; @@ -41,7 +41,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.mockito.Mockito.mock; public class ClearRealmsCacheTests extends SecurityIntegTestCase { private static final String USERS_PASSWD_HASHED = new String(Hasher.BCRYPT.hash(new SecureString("passwd".toCharArray()))); @@ -234,9 +233,9 @@ public class ClearRealmsCacheTests extends SecurityIntegTestCase { Map> users = new HashMap<>(); for (Realm realm : realms) { for (String username : usernames) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(tokens.get(username), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(tokens.get(username), future); + User user = future.actionGet().getUser(); assertThat(user, notNullValue()); Map realmToUser = users.get(username); if (realmToUser == null) { @@ -251,9 +250,9 @@ public class 
ClearRealmsCacheTests extends SecurityIntegTestCase { for (String username : usernames) { for (Realm realm : realms) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(tokens.get(username), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(tokens.get(username), future); + User user = future.actionGet().getUser(); assertThat(user, sameInstance(users.get(username).get(realm))); } } @@ -264,9 +263,9 @@ public class ClearRealmsCacheTests extends SecurityIntegTestCase { // now, user_a should have been evicted, but user_b should still be cached for (String username : usernames) { for (Realm realm : realms) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(tokens.get(username), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(tokens.get(username), future); + User user = future.actionGet().getUser(); assertThat(user, notNullValue()); scenario.assertEviction(users.get(username).get(realm), user); } diff --git a/plugin/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/plugin/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 8336ef5c08b..81416305994 100644 --- a/plugin/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/plugin/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.Version; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.fieldstats.FieldStatsResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.search.MultiSearchResponse; @@ -23,13 +22,13 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.join.ParentJoinPlugin; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -54,7 +53,6 @@ import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -66,7 +64,7 @@ import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.nullValue; // The random usage of meta fields such as _timestamp add noise to the test, so disable random index templates: -@ESIntegTestCase.ClusterScope(randomDynamicTemplates = false) +@ESIntegTestCase.ClusterScope public class FieldLevelSecurityTests extends SecurityIntegTestCase { protected static final SecureString USERS_PASSWD = new SecureString("change_me".toCharArray()); @@ -664,87 +662,6 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); } - public void testFieldStatsApi() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test") - .addMapping("type1", "field1", "type=text", "field2", "type=text", "field3", "type=text") - ); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2", "field3", "value3") - .setRefreshPolicy(IMMEDIATE) - .get(); - - // user1 is granted access to field1 only: - FieldStatsResponse response = client() - .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(1)); - assertThat(response.getAllFieldStats().get("field1").getDocCount(), equalTo(1L)); - - // user2 is granted access to field2 only: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(1)); - assertThat(response.getAllFieldStats().get("field2").getDocCount(), equalTo(1L)); - - // user3 is granted access to field1 and field2: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(2)); - assertThat(response.getAllFieldStats().get("field1").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field2").getDocCount(), equalTo(1L)); - - // user4 is granted access to no fields: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(0)); - - // user5 has no field level security configured: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(3)); - assertThat(response.getAllFieldStats().get("field1").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field2").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field3").getDocCount(), equalTo(1L)); - - // user6 has field level security configured for field*: - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(3)); - 
assertThat(response.getAllFieldStats().get("field1").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field2").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field3").getDocCount(), equalTo(1L)); - - // user7 has no field level security configured (roles with and without field level security): - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(3)); - assertThat(response.getAllFieldStats().get("field1").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field2").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field3").getDocCount(), equalTo(1L)); - - // user8 has field level security configured for field1 and field2 (multiple roles): - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) - .prepareFieldStats() - .setFields("field1", "field2", "field3") - .get(); - assertThat(response.getAllFieldStats().size(), equalTo(2)); - assertThat(response.getAllFieldStats().get("field1").getDocCount(), equalTo(1L)); - assertThat(response.getAllFieldStats().get("field2").getDocCount(), equalTo(1L)); - } - public void testScroll() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setSettings(Settings.builder().put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true)) diff --git a/plugin/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java b/plugin/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java index cf5e4087ffc..ce4f037a9f4 100644 --- a/plugin/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java +++ b/plugin/src/test/java/org/elasticsearch/integration/IndexPrivilegeTests.java @@ -28,7 +28,7 @@ import static org.hamcrest.Matchers.is; //test is just too slow, please fix it to not be sleep-based @BadApple(bugUrl = "https://github.com/elastic/x-plugins/issues/1007") -@ESIntegTestCase.ClusterScope(randomDynamicTemplates = false, maxNumDataNodes = 2) +@ESIntegTestCase.ClusterScope(maxNumDataNodes = 2) public class IndexPrivilegeTests extends AbstractPrivilegeTestCase { private String jsonDoc = "{ \"name\" : \"elasticsearch\", \"body\": \"foo bar\" }"; diff --git a/plugin/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/plugin/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index 8d5bca8807d..6a42330a19d 100644 --- a/plugin/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/plugin/src/test/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -13,8 +13,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRespon import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.action.fieldstats.FieldStatsResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; @@ -22,9 +20,9 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import 
org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.test.SecurityIntegTestCase; import java.util.Locale; import java.util.Map; @@ -136,26 +134,6 @@ public class KibanaUserRoleIntegTests extends SecurityIntegTestCase { assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits(), multiHits); } - public void testFieldStats() throws Exception { - final String index = "logstash-20-12-2015"; - final String type = "event"; - final String field = "foo"; - indexRandom(true, client().prepareIndex().setIndex(index).setType(type).setSource(field, "bar")); - - FieldStatsResponse response = client().prepareFieldStats().setIndices(index).setFields(field).get(); - FieldStats fieldStats = response.getAllFieldStats().get(field); - assertThat(fieldStats, notNullValue()); - final String fieldStatsMax = fieldStats.getMaxValueAsString(); - - response = client() - .filterWithHeader(singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD))) - .prepareFieldStats() - .setIndices(index).setFields(field).get(); - FieldStats fieldStats1 = response.getAllFieldStats().get(field); - assertThat(fieldStats1, notNullValue()); - assertThat(fieldStats1.getMaxValueAsString(), equalTo(fieldStatsMax)); - } - public void testGetIndex() throws Exception { final String index = "logstash-20-12-2015"; final String type = "event"; diff --git a/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java b/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java index 0255cf5498e..d3ff87ca36a 100644 --- a/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java +++ b/plugin/src/test/java/org/elasticsearch/license/LicensingTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.SecureString; @@ -159,6 +160,9 @@ public class LicensingTests extends SecurityIntegTestCase { assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); refresh(); + // wait for all replicas to be started (to make sure that there are no more cluster state updates when we disable licensing) + assertBusy(() -> assertTrue(client().admin().cluster().prepareState().get().getState().routingTable() + .shardsWithState(ShardRoutingState.INITIALIZING).isEmpty())); Client client = internalCluster().transportClient(); diff --git a/plugin/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java index 8f36175a652..2c149245a52 100644 --- a/plugin/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java @@ -35,7 +35,10 @@ public abstract class NativeRealmIntegTestCase extends SecurityIntegTestCase { @Before public void ensureNativeStoresStarted() throws Exception { 
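[editor's note - illustrative sketch, not part of the patch] The hunks that follow here and in SecurityIntegTestCase further down split the test bootstrap into two steps: first seed the "elastic" user's password hash directly on the ReservedRealm, then change the other reserved users' passwords over REST while authenticating as "elastic". A condensed sketch of that flow under the patch's own classes and methods; getRestClient(), internalCluster() and exception handling come from the surrounding test infrastructure, and "new-password" is a placeholder value:

    // 1) seed the elastic password hash on the reserved realm and wait for the listener
    CountDownLatch latch = new CountDownLatch(1);
    SecureString hashed = new SecureString(SecuritySettingsSource.TEST_PASSWORD_HASHED.toCharArray());
    ReservedRealm reservedRealm = internalCluster().getInstances(ReservedRealm.class).iterator().next();
    reservedRealm.bootstrapElasticUserCredentials(hashed, ActionListener.wrap(
            ok -> latch.countDown(),
            e -> { throw new AssertionError("bootstrap failed", e); }));
    latch.await();

    // 2) set the remaining reserved users' passwords via the change-password endpoint,
    //    authenticating as "elastic" with the password bootstrapped above
    for (String username : Arrays.asList(KibanaUser.NAME, BeatsSystemUser.NAME, LogstashSystemUser.NAME)) {
        HttpEntity entity = new NStringEntity("{\"password\": \"new-password\"}", ContentType.APPLICATION_JSON);
        BasicHeader auth = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER,
                UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING));
        getRestClient().performRequest("PUT", "/_xpack/security/user/" + username + "/_password",
                Collections.emptyMap(), entity, auth);
    }
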
assertSecurityIndexActive(); - setupReservedPasswords(); + if (shouldSetReservedUserPasswords()) { + ensureElasticPasswordBootstrapped(); + setupReservedPasswords(); + } } @After @@ -68,18 +71,14 @@ public abstract class NativeRealmIntegTestCase extends SecurityIntegTestCase { } public void setupReservedPasswords() throws IOException { - if (shouldSetReservedUserPasswords() == false) { - return; - } logger.info("setting up reserved passwords for test"); SecureString defaultPassword = new SecureString("".toCharArray()); - for (String username : Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, BeatsSystemUser.NAME, LogstashSystemUser.NAME)) { - SecureString authPassword = username.equals(ElasticUser.NAME) ? defaultPassword : reservedPassword; + for (String username : Arrays.asList(KibanaUser.NAME, BeatsSystemUser.NAME, LogstashSystemUser.NAME)) { String payload = "{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"; HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, authPassword)); + UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING)); String route = "/_xpack/security/user/" + username + "/_password"; Response response = getRestClient().performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); } diff --git a/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 028785d32f9..255bf4c0cbb 100644 --- a/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.test; import org.elasticsearch.AbstractOldXPackIndicesBackwardsCompatibilityTestCase; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -35,6 +36,7 @@ import org.elasticsearch.xpack.XPackSettings; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.security.InternalClient; import org.elasticsearch.xpack.security.Security; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.client.SecurityClient; import org.junit.AfterClass; import org.junit.Before; @@ -48,6 +50,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.function.Function; import java.util.stream.Collectors; @@ -184,7 +187,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { Collection pluginNames = nodeInfo.getPlugins().getPluginInfos().stream().map(p -> p.getClassname()).collect(Collectors.toList()); assertThat("plugin [" + xpackPluginClass().getName() + "] not found in [" + pluginNames + "]", pluginNames, - hasItem(xpackPluginClass().getName())); + hasItem(xpackPluginClass().getName())); } } @@ -209,7 +212,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { Settings.Builder customBuilder = Settings.builder().put(customSettings); if (customBuilder.getSecureSettings() != null) { SecuritySettingsSource.addSecureSettings(builder, 
secureSettings -> - secureSettings.merge((MockSecureSettings) customBuilder.getSecureSettings())); + secureSettings.merge((MockSecureSettings) customBuilder.getSecureSettings())); } return builder.build(); } @@ -408,7 +411,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { } @Override - protected Function getClientWrapper() { + protected Function getClientWrapper() { Map headers = Collections.singletonMap("Authorization", basicAuthHeaderValue(nodeClientUsername(), nodeClientPassword())); // we need to wrap node clients because we do not specify a user for nodes and all requests will use the system @@ -445,7 +448,11 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { } public void assertSecurityIndexActive() throws Exception { - for (ClusterService clusterService : internalCluster().getInstances(ClusterService.class)) { + assertSecurityIndexActive(internalCluster()); + } + + public void assertSecurityIndexActive(InternalTestCluster internalTestCluster) throws Exception { + for (ClusterService clusterService : internalTestCluster.getInstances(ClusterService.class)) { assertBusy(() -> { ClusterState clusterState = clusterService.state(); assertFalse(clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)); @@ -461,6 +468,29 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { } } + public void ensureElasticPasswordBootstrapped() throws Exception { + ensureElasticPasswordBootstrapped(internalCluster()); + } + + public void ensureElasticPasswordBootstrapped(InternalTestCluster internalTestCluster) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + SecureString testPasswordHashed = new SecureString(SecuritySettingsSource.TEST_PASSWORD_HASHED.toCharArray()); + ReservedRealm reservedRealm = internalTestCluster.getInstances(ReservedRealm.class).iterator().next(); + reservedRealm.bootstrapElasticUserCredentials(testPasswordHashed, new ActionListener() { + @Override + public void onResponse(Boolean passwordSet) { + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + logger.error("Exception attempting to bootstrap password for test", e); + fail("Failed to bootstrap elastic password for test due to exception: " + e.getMessage()); + } + }); + latch.await(); + } + public void assertSecurityIndexWriteable() throws Exception { for (ClusterService clusterService : internalCluster().getInstances(ClusterService.class)) { assertBusy(() -> { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java b/plugin/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java index 17a08024331..d4e84c67370 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/common/text/TextTemplateTests.java @@ -62,7 +62,7 @@ public class TextTemplateTests extends ESTestCase { } }; - when(service.compile(new Script(type, lang, templateText, + when(service.compile(new Script(type, type == ScriptType.STORED ? null : lang, templateText, type == ScriptType.INLINE ? Collections.singletonMap("content_type", "text/plain") : null, merged), Watcher.SCRIPT_TEMPLATE_CONTEXT)).thenReturn(compiledTemplate); @@ -84,7 +84,7 @@ public class TextTemplateTests extends ESTestCase { } }; - when(service.compile(new Script(type, lang, templateText, + when(service.compile(new Script(type, type == ScriptType.STORED ? null : lang, templateText, type == ScriptType.INLINE ? 
Collections.singletonMap("content_type", "text/plain") : null, model), Watcher.SCRIPT_TEMPLATE_CONTEXT)).thenReturn(compiledTemplate); @@ -114,7 +114,8 @@ public class TextTemplateTests extends ESTestCase { public void testParser() throws Exception { ScriptType type = randomScriptType(); - TextTemplate template = templateBuilder(type, "_template", singletonMap("param_key", "param_val")); + TextTemplate template = + templateBuilder(type, "_template", singletonMap("param_key", "param_val")); XContentBuilder builder = jsonBuilder().startObject(); switch (type) { case INLINE: @@ -134,7 +135,9 @@ public class TextTemplateTests extends ESTestCase { } public void testParserParserSelfGenerated() throws Exception { - TextTemplate template = templateBuilder(randomScriptType(), "_template", singletonMap("param_key", "param_val")); + ScriptType type = randomScriptType(); + TextTemplate template = + templateBuilder(type, "_template", singletonMap("param_key", "param_val")); XContentBuilder builder = jsonBuilder().value(template); BytesReference bytes = builder.bytes(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java index d6f12c58e9e..9f6248f4913 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationChecksTests.java @@ -36,33 +36,4 @@ public class DeprecationChecksTests extends ESTestCase { List filteredIssues = DeprecationChecks.filterChecks(checks, Supplier::get); assertThat(filteredIssues.size(), equalTo(numChecksFailed)); } - - public void testCoerceBooleanDeprecation() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject(); { - mapping.startObject("properties"); { - mapping.startObject("my_boolean"); { - mapping.field("type", "boolean"); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - - IndexMetaData indexMetaData = IndexMetaData.builder("test") - .putMapping("testBooleanCoercion", mapping.string()) - .settings(settings(Version.V_5_6_0)) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.INFO, - "Coercion of boolean fields", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_mappings_changes.html#_coercion_of_boolean_fields", - Arrays.toString(new String[] { "type: testBooleanCoercion, field: my_boolean" })); - List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); - assertThat(issues.size(), equalTo(1)); - assertThat(issues.get(0), equalTo(expected)); - } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java index c27f81a7343..b79ff697063 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/DeprecationInfoActionResponseTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.deprecation; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -76,6 +77,9 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa List nodeInfos = Collections.singletonList(new NodeInfo(Version.CURRENT, Build.CURRENT, discoveryNode, null, null, null, null, null, null, null, null, null, null)); + List nodeStats = Collections.singletonList(new NodeStats(discoveryNode, 0L, null, + null, null, null, null, null, null, null, null, + null, null, null)); IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(Settings.EMPTY); IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, true); @@ -83,13 +87,13 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa boolean nodeIssueFound = randomBoolean(); boolean indexIssueFound = randomBoolean(); DeprecationIssue foundIssue = DeprecationIssueTests.createTestInstance(); - List, ClusterState, DeprecationIssue>> clusterSettingsChecks = + List> clusterSettingsChecks = Collections.unmodifiableList(Arrays.asList( - (ln, s) -> clusterIssueFound ? foundIssue : null + (s) -> clusterIssueFound ? foundIssue : null )); - List, ClusterState, DeprecationIssue>> nodeSettingsChecks = + List, List, DeprecationIssue>> nodeSettingsChecks = Collections.unmodifiableList(Arrays.asList( - (ln, s) -> nodeIssueFound ? foundIssue : null + (ln, ls) -> nodeIssueFound ? foundIssue : null )); List> indexSettingsChecks = @@ -97,7 +101,7 @@ public class DeprecationInfoActionResponseTests extends AbstractStreamableTestCa (idx) -> indexIssueFound ? foundIssue : null )); - DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(nodeInfos, state, + DeprecationInfoAction.Response response = DeprecationInfoAction.Response.from(nodeInfos, nodeStats, state, resolver, Strings.EMPTY_ARRAY, indicesOptions, clusterSettingsChecks, nodeSettingsChecks, indexSettingsChecks); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java new file mode 100644 index 00000000000..301ce9b53ae --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -0,0 +1,233 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.List; + +import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; +import static org.hamcrest.core.IsEqual.equalTo; + +public class IndexDeprecationChecksTests extends ESTestCase { + + private void assertSettingsAndIssue(String key, String value, DeprecationIssue expected) { + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.V_5_6_0) + .put(key, value)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + assertEquals(singletonList(expected), issues); + } + + public void testCoerceBooleanDeprecation() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder(); + mapping.startObject(); { + mapping.startObject("_all"); { + mapping.field("enabled", false); + } + mapping.endObject(); + mapping.startObject("properties"); { + mapping.startObject("my_boolean"); { + mapping.field("type", "boolean"); + } + mapping.endObject(); + mapping.startObject("my_object"); { + mapping.startObject("properties"); { + mapping.startObject("my_inner_boolean"); { + mapping.field("type", "boolean"); + } + mapping.endObject(); + mapping.startObject("my_text"); { + mapping.field("type", "text"); + mapping.startObject("fields"); { + mapping.startObject("raw"); { + mapping.field("type", "boolean"); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .putMapping("testBooleanCoercion", mapping.string()) + .settings(settings(Version.V_5_6_0)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.INFO, + "Coercion of boolean fields", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html#_coercion_of_boolean_fields", + "[[type: testBooleanCoercion, field: my_boolean], [type: testBooleanCoercion, field: my_inner_boolean]," + + " [type: testBooleanCoercion, field: my_text, multifield: raw]]"); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + assertEquals(singletonList(expected), issues); + } + + public void testAllMetaFieldIsDisabledByDefaultCheck() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder(); + mapping.startObject(); { + mapping.startObject("_all"); { + mapping.field("enabled", randomFrom("1", 1, "true", true)); + } + mapping.endObject(); + } + mapping.endObject(); + + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .putMapping("testAllEnabled", mapping.string()) + .settings(settings(Version.V_5_6_0)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.INFO, + "The _all meta field is disabled by default on indices created in 6.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + 
"breaking_60_mappings_changes.html#_the_literal__all_literal_meta_field_is_now_disabled_by_default", + "types: [testAllEnabled]"); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + assertEquals(singletonList(expected), issues); + } + + public void testIncludeInAllCheck() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder(); + mapping.startObject(); { + mapping.startObject("_all"); { + mapping.field("enabled", false); + } + mapping.endObject(); + mapping.startObject("properties"); { + mapping.startObject("my_field"); { + mapping.field("type", "text"); + mapping.field("include_in_all", false); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .putMapping("testIncludeInAll", mapping.string()) + .settings(settings(Version.V_5_6_0)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "The [include_in_all] mapping parameter is now disallowed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html#_the_literal_include_in_all_literal_mapping_parameter_is_now_disallowed", + "[[type: testIncludeInAll, field: my_field]]"); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + assertEquals(singletonList(expected), issues); + } + + public void testMatchMappingTypeCheck() throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder(); + mapping.startObject(); { + mapping.startObject("_all"); { + mapping.field("enabled", false); + } + mapping.endObject(); + mapping.startArray("dynamic_templates"); + { + mapping.startObject(); + { + mapping.startObject("integers"); + { + mapping.field("match_mapping_type", "UNKNOWN_VALUE"); + mapping.startObject("mapping"); + { + mapping.field("type", "integer"); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endArray(); + } + mapping.endObject(); + + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .putMapping("test", mapping.string()) + .settings(settings(Version.V_5_6_0)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + + DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "Unrecognized match_mapping_type options not silently ignored", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_mappings_changes.html#_unrecognized_literal_match_mapping_type_literal_options_not_silently_ignored", + "[type: test, dynamicFieldDefinitionintegers, unknown match_mapping_type[UNKNOWN_VALUE]]"); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); + assertEquals(singletonList(expected), issues); + } + + public void testBaseSimilarityDefinedCheck() { + assertSettingsAndIssue("index.similarity.base.type", "classic", + new DeprecationIssue(DeprecationIssue.Level.WARNING, + "The base similarity is now ignored as coords and query normalization have been removed." 
+ + "If provided, this setting will be ignored and issue a deprecation warning", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_similarity_settings", null)); + } + + public void testIndexStoreTypeCheck() { + assertSettingsAndIssue("index.store.type", "niofs", + new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "The default index.store.type has been removed. If you were using it, " + + "we advise that you simply remove it from your index settings and Elasticsearch" + + "will use the best store implementation for your operating system.", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_store_settings", null)); + } + public void testStoreThrottleSettingsCheck() { + assertSettingsAndIssue("index.store.throttle.max_bytes_per_sec", "32", + new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "index.store.throttle settings are no longer recognized. these settings should be removed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_store_throttling_settings", + "present settings: [index.store.throttle.max_bytes_per_sec]")); + assertSettingsAndIssue("index.store.throttle.type", "none", + new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "index.store.throttle settings are no longer recognized. these settings should be removed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_store_throttling_settings", + "present settings: [index.store.throttle.type]")); + } + + public void testSharedFileSystemSettingsCheck() { + assertSettingsAndIssue("index.shared_filesystem", "true", + new DeprecationIssue(DeprecationIssue.Level.CRITICAL, + "[index.shared_filesystem] setting should be removed", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + + "breaking_60_settings_changes.html#_shadow_replicas_have_been_removed", null)); + } +} \ No newline at end of file diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java index ed7cbdf0319..45c3847ace1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/action/OpenJobActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -26,6 +27,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlMetaIndex; import org.elasticsearch.xpack.ml.MlMetadata; @@ -33,11 +35,13 @@ import org.elasticsearch.xpack.ml.job.config.Job; import org.elasticsearch.xpack.ml.job.config.JobState; import org.elasticsearch.xpack.ml.job.config.JobTaskStatus; import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndex; +import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchMappings; import 
org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData.Assignment; +import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.Collections; @@ -339,6 +343,79 @@ public class OpenJobActionTests extends ESTestCase { assertEquals(indexToRemove, result.get(0)); } + public void testMappingRequiresUpdateNoMapping() throws IOException { + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + ClusterState cs = csBuilder.build(); + String[] indices = new String[] { "no_index" }; + + assertArrayEquals(new String[] { "no_index" }, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateNullMapping() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("null_mapping", null)); + String[] indices = new String[] { "null_index" }; + assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateNoVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("no_version_field", "NO_VERSION_FIELD")); + String[] indices = new String[] { "no_version_field" }; + assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateRecentMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_current", Version.CURRENT.toString())); + String[] indices = new String[] { "version_current" }; + assertArrayEquals(new String[] {}, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateMaliciousMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData( + Collections.singletonMap("version_current", Collections.singletonMap("nested", "1.0"))); + String[] indices = new String[] { "version_nested" }; + assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateOldMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_54", Version.V_5_4_0.toString())); + String[] indices = new String[] { "version_54" }; + assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateBogusMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_bogus", "0.0")); + String[] indices = new String[] { "version_bogus" }; + assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + + public void testMappingRequiresUpdateNewerMappingVersion() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer", Version.CURRENT)); + String[] indices = new String[] { "version_newer" }; + assertArrayEquals(new String[] {}, OpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousVersion(), logger)); + } + + public void 
testMappingRequiresUpdateNewerMappingVersionMinor() throws IOException { + ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_newer_minor", Version.CURRENT)); + String[] indices = new String[] { "version_newer_minor" }; + assertArrayEquals(new String[] {}, + OpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousMinorVersion(), logger)); + } + + public void testMappingRequiresUpdateSomeVersionMix() throws IOException { + Map versionMix = new HashMap(); + versionMix.put("version_54", Version.V_5_4_0); + versionMix.put("version_current", Version.CURRENT); + versionMix.put("version_null", null); + versionMix.put("version_current2", Version.CURRENT); + versionMix.put("version_bogus", "0.0.0"); + versionMix.put("version_current3", Version.CURRENT); + versionMix.put("version_bogus2", "0.0.0"); + + ClusterState cs = getClusterStateWithMappingsWithMetaData(versionMix); + String[] indices = new String[] { "version_54", "version_null", "version_bogus", "version_bogus2" }; + assertArrayEquals(indices, OpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); + } + public static void addJobTask(String jobId, String nodeId, JobState jobState, PersistentTasksCustomMetaData.Builder builder) { builder.addTask(MlMetadata.jobTaskId(jobId), OpenJobAction.TASK_NAME, new OpenJobAction.JobParams(jobId), new Assignment(nodeId, "test assignment")); @@ -384,4 +461,40 @@ public class OpenJobActionTests extends ESTestCase { metaData.putCustom(MlMetadata.TYPE, mlMetadata.build()); } + private ClusterState getClusterStateWithMappingsWithMetaData(Map namesAndVersions) throws IOException { + MetaData.Builder metaDataBuilder = MetaData.builder(); + + for (Map.Entry entry : namesAndVersions.entrySet()) { + + String indexName = entry.getKey(); + Object version = entry.getValue(); + + IndexMetaData.Builder indexMetaData = IndexMetaData.builder(indexName); + indexMetaData.settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); + + Map mapping = new HashMap<>(); + Map properties = new HashMap<>(); + for (int i = 0; i < 10; i++) { + properties.put("field" + i, Collections.singletonMap("type", "string")); + } + mapping.put("properties", properties); + + Map meta = new HashMap<>(); + if (version != null && version.equals("NO_VERSION_FIELD") == false) { + meta.put("version", version); + } + mapping.put("_meta", meta); + + indexMetaData.putMapping(new MappingMetaData(ElasticsearchMappings.DOC_TYPE, mapping)); + + metaDataBuilder.put(indexMetaData); + } + MetaData metaData = metaDataBuilder.build(); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.metaData(metaData); + return csBuilder.build(); + } + } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 35d0501a915..f5de544837a 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -32,15 +32,14 @@ import static org.hamcrest.Matchers.equalTo; public class DatafeedJobsRestIT extends ESRestTestCase { - private static final String BASIC_AUTH_VALUE_ELASTIC = - basicAuthHeaderValue("elastic", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); + private static final 
String BASIC_AUTH_VALUE_SUPER_USER = + basicAuthHeaderValue("x_pack_rest_user", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); private static final String BASIC_AUTH_VALUE_ML_ADMIN = basicAuthHeaderValue("ml_admin", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); @Override protected Settings restClientSettings() { - return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", - BASIC_AUTH_VALUE_ELASTIC).build(); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE_SUPER_USER).build(); } @Override @@ -51,10 +50,6 @@ public class DatafeedJobsRestIT extends ESRestTestCase { @Before public void setUpData() throws Exception { String password = new String(SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING.getChars()); - String elasticUserPayload = "{\"password\" : \"" + password + "\"}"; - - client().performRequest("put", "_xpack/security/user/elastic/_password", Collections.emptyMap(), - new StringEntity(elasticUserPayload, ContentType.APPLICATION_JSON)); // This user has admin rights on machine learning, but (importantly for the tests) no // rights on any of the data indexes @@ -310,7 +305,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase { new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").build(); // This should be disallowed, because ml_admin is trying to preview a datafeed created by - // by another user (elastic in this case) that will reveal the content of an index they + // by another user (x_pack_rest_user in this case) that will reveal the content of an index they // don't have permission to search directly ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest("get", @@ -581,7 +576,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase { boolean source; String scriptedFields; String aggregations; - String authHeader = BASIC_AUTH_VALUE_ELASTIC; + String authHeader = BASIC_AUTH_VALUE_SUPER_USER; DatafeedBuilder(String datafeedId, String jobId, String index, String type) { this.datafeedId = datafeedId; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 513c1af4c6c..5e9d50f1eab 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -36,7 +36,8 @@ import static org.hamcrest.Matchers.not; public class MlJobIT extends ESRestTestCase { - private static final String BASIC_AUTH_VALUE = basicAuthHeaderValue("elastic", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); + private static final String BASIC_AUTH_VALUE = basicAuthHeaderValue("x_pack_rest_user", + SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); @Override protected Settings restClientSettings() { @@ -339,9 +340,17 @@ public class MlJobIT extends ESRestTestCase { "}"; String jobId1 = "job-with-response-field"; - String byFieldName1 = "response"; + String byFieldName1; String jobId2 = "job-will-fail-with-mapping-error-on-response-field"; - String byFieldName2 = "response.time"; + String byFieldName2; + // we should get the friendly advice no matter which way around the clashing fields are seen + if (randomBoolean()) { + byFieldName1 = "response"; + byFieldName2 = "response.time"; + } else { + byFieldName1 = "response.time"; + byFieldName2 = "response"; + } String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1); Response response = client().performRequest("put",
MachineLearning.BASE_PATH @@ -354,8 +363,8 @@ public class MlJobIT extends ESRestTestCase { Collections.emptyMap(), new StringEntity(failingJobConfig, ContentType.APPLICATION_JSON))); assertThat(e.getMessage(), - containsString("A field has a different mapping type to an existing field with the same name. " + - "Use the 'results_index_name' setting to assign the job to another index")); + containsString("This job would cause a mapping clash with existing field [response] - " + + "avoid the clash by assigning a dedicated results index")); } public void testDeleteJob() throws Exception { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 4a487066d3d..c2c0361ec92 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -81,7 +81,7 @@ abstract class MlNativeAutodetectIntegTestCase extends SecurityIntegTestCase { protected Settings externalClusterClientSettings() { Settings.Builder builder = Settings.builder(); builder.put(NetworkModule.TRANSPORT_TYPE_KEY, Security.NAME4); - builder.put(Security.USER_SETTING.getKey(), "elastic:" + SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); + builder.put(Security.USER_SETTING.getKey(), "x_pack_rest_user:" + SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), true); return builder.build(); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlRestTestStateCleaner.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlRestTestStateCleaner.java index 62504dd67e9..97f0c3c9709 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlRestTestStateCleaner.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/integration/MlRestTestStateCleaner.java @@ -41,7 +41,7 @@ public class MlRestTestStateCleaner { deleteAllDatafeeds(); deleteAllJobs(); waitForPendingTasks(); - deleteDotML(); + // indices will be deleted by the ESIntegTestCase class } private void waitForPendingTasks() throws Exception { @@ -152,11 +152,4 @@ public class MlRestTestStateCleaner { } } } - - private void deleteDotML() throws IOException { - int statusCode = adminClient.performRequest("DELETE", ".ml-*?ignore_unavailable=true").getStatusLine().getStatusCode(); - if (statusCode != 200) { - logger.error("Got status code " + statusCode + " when deleting .ml-* indexes"); - } - } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java index 0973a1eaf27..667629e27b9 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ml/job/config/AnalysisConfigTests.java @@ -308,7 +308,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase returnResponse = invocation -> { @SuppressWarnings("unchecked") diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java index d822c1efaa2..a0dc3d5ed4e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java +++ 
b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityLifecycleServiceTests.java @@ -395,6 +395,7 @@ public class SecurityLifecycleServiceTests extends ESTestCase { clusterStateBuilder.metaData(metaDataBuilder); } + clusterStateBuilder.routingTable(SecurityTestUtils.buildIndexRoutingTable(securityIndexName)); return clusterStateBuilder; } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTribeIT.java b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTribeIT.java index f86511ed037..4b5990873f7 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTribeIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/SecurityTribeIT.java @@ -5,16 +5,9 @@ */ package org.elasticsearch.xpack.security; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHost; -import org.apache.http.entity.ContentType; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.service.ClusterService; @@ -29,7 +22,6 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; -import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; @@ -40,14 +32,11 @@ import org.elasticsearch.xpack.security.action.role.PutRoleResponse; import org.elasticsearch.xpack.security.action.user.PutUserResponse; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.client.SecurityClient; -import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.support.IndexLifecycleManager; - import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; -import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -58,7 +47,6 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.function.Predicate; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.containsString; @@ -102,11 +90,7 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { cluster2.beforeTest(random(), 0.1); cluster2.ensureAtLeastNumDataNodes(2); } - } - - @Override - public boolean shouldSetReservedUserPasswords() { - return false; + assertSecurityIndexActive(cluster2); } @Override @@ -132,7 +116,7 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { public void tearDownTribeNodeAndWipeCluster() throws Exception { if (cluster2 != null) { try { - cluster2.wipe(Collections.emptySet()); + cluster2.wipe(Collections.emptySet()); try { // this is a hack to clean up the .security index since only the XPack user or superusers can delete it cluster2.getInstance(InternalClient.class) @@ 
-160,7 +144,12 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { return true; } - private void setupTribeNode(Settings settings) throws NodeValidationException, InterruptedException { + @Override + protected boolean shouldSetReservedUserPasswords() { + return false; + } + + private void setupTribeNode(Settings settings) throws Exception { SecuritySettingsSource cluster2SettingsSource = new SecuritySettingsSource(1, useGeneratedSSL, createTempDir(), Scope.TEST) { @Override @@ -171,8 +160,9 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { .build(); } }; + final Settings settingsTemplate = cluster2SettingsSource.nodeSettings(0); - Map asMap = new HashMap<>(cluster2SettingsSource.nodeSettings(0).getAsMap()); + Map asMap = new HashMap<>(settingsTemplate.getAsMap()); asMap.remove(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey()); Settings.Builder tribe1Defaults = Settings.builder(); Settings.Builder tribe2Defaults = Settings.builder(); @@ -187,7 +177,7 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { } // TODO: rethink how these settings are generated for tribes once we support more than just string settings... MockSecureSettings secureSettingsTemplate = - (MockSecureSettings) Settings.builder().put(cluster2SettingsSource.nodeSettings(0)).getSecureSettings(); + (MockSecureSettings) Settings.builder().put(settingsTemplate).getSecureSettings(); MockSecureSettings secureSettings = new MockSecureSettings(); if (secureSettingsTemplate != null) { for (String settingName : secureSettingsTemplate.getSettingNames()) { @@ -246,26 +236,14 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { }, nodeCountPredicate); latch.await(); } + + assertTribeNodeHasAllIndices(); } public void testThatTribeCanAuthenticateElasticUser() throws Exception { - InetSocketAddress[] inetSocketAddresses = cluster2.httpAddresses(); - List hosts = new ArrayList<>(); - for (InetSocketAddress address : inetSocketAddresses) { - hosts.add(new HttpHost(address.getAddress(), address.getPort())); - } - RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()])); - RestClient client = builder.build(); - - String payload = "{\"password\": \"" + SecuritySettingsSource.TEST_PASSWORD + "\"}"; - HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); - BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, new SecureString("".toCharArray()))); - String route = "/_xpack/security/user/" + ElasticUser.NAME + "/_password"; - client.performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); - client.close(); - setupTribeNode(Settings.EMPTY); + ensureElasticPasswordBootstrapped(internalCluster()); + assertTribeNodeHasAllIndices(); ClusterHealthResponse response = tribeClient.filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("elastic", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING))) .admin().cluster().prepareHealth().get(); @@ -274,8 +252,9 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { public void testThatTribeCanAuthenticateElasticUserWithChangedPassword() throws Exception { setupTribeNode(Settings.EMPTY); - Client clusterClient = randomBoolean() ? client() : cluster2.client(); - securityClient(clusterClient).prepareChangePassword("elastic", "password".toCharArray()).get(); + InternalTestCluster cluster = randomBoolean() ? 
internalCluster() : cluster2; + ensureElasticPasswordBootstrapped(cluster); + securityClient(cluster.client()).prepareChangePassword("elastic", "password".toCharArray()).get(); assertTribeNodeHasAllIndices(); ClusterHealthResponse response = tribeClient.filterWithHeader(Collections.singletonMap("Authorization", @@ -286,6 +265,8 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { public void testThatTribeClustersHaveDifferentPasswords() throws Exception { setupTribeNode(Settings.EMPTY); + ensureElasticPasswordBootstrapped(internalCluster()); + ensureElasticPasswordBootstrapped(cluster2); securityClient().prepareChangePassword("elastic", "password".toCharArray()).get(); securityClient(cluster2.client()).prepareChangePassword("elastic", "password2".toCharArray()).get(); @@ -297,6 +278,9 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { } public void testUsersInBothTribes() throws Exception { + ensureElasticPasswordBootstrapped(internalCluster()); + ensureElasticPasswordBootstrapped(cluster2); + final String preferredTribe = randomBoolean() ? "t1" : "t2"; setupTribeNode(Settings.builder().put("tribe.on_conflict", "prefer_" + preferredTribe).build()); final int randomUsers = scaledRandomIntBetween(3, 8); @@ -305,10 +289,7 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { List shouldBeSuccessfulUsers = new ArrayList<>(); List shouldFailUsers = new ArrayList<>(); final Client preferredClient = "t1".equals(preferredTribe) ? cluster1Client : cluster2Client; - // always ensure the index exists on all of the clusters in this test - assertAcked(internalClient().admin().indices().prepareCreate(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get()); - assertAcked(cluster2.getInstance(InternalClient.class).admin().indices() - .prepareCreate(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get()); + for (int i = 0; i < randomUsers; i++) { final String username = "user" + i; Client clusterClient = randomBoolean() ? cluster1Client : cluster2Client; @@ -346,16 +327,16 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { final String preferredTribe = randomBoolean() ? "t1" : "t2"; setupTribeNode(Settings.builder().put("tribe.on_conflict", "prefer_" + preferredTribe).build()); final int randomUsers = scaledRandomIntBetween(3, 8); - final Client cluster1Client = client(); - final Client cluster2Client = cluster2.client(); + List shouldBeSuccessfulUsers = new ArrayList<>(); // only create users in the non preferred client - final Client nonPreferredClient = "t1".equals(preferredTribe) ? cluster2Client : cluster1Client; + final InternalTestCluster nonPreferredCluster = "t1".equals(preferredTribe) ? 
cluster2 : internalCluster(); + ensureElasticPasswordBootstrapped(nonPreferredCluster); for (int i = 0; i < randomUsers; i++) { final String username = "user" + i; PutUserResponse response = - securityClient(nonPreferredClient).preparePutUser(username, "password".toCharArray(), "superuser").get(); + securityClient(nonPreferredCluster.client()).preparePutUser(username, "password".toCharArray(), "superuser").get(); assertTrue(response.created()); shouldBeSuccessfulUsers.add(username); } @@ -370,6 +351,8 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { } public void testUserModificationUsingTribeNodeAreDisabled() throws Exception { + ensureElasticPasswordBootstrapped(internalCluster()); + setupTribeNode(Settings.EMPTY); SecurityClient securityClient = securityClient(tribeClient); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, @@ -385,6 +368,9 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { } public void testRetrieveRolesOnTribeNode() throws Exception { + ensureElasticPasswordBootstrapped(internalCluster()); + ensureElasticPasswordBootstrapped(cluster2); + final String preferredTribe = randomBoolean() ? "t1" : "t2"; setupTribeNode(Settings.builder().put("tribe.on_conflict", "prefer_" + preferredTribe).build()); final int randomRoles = scaledRandomIntBetween(3, 8); @@ -393,10 +379,6 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { List shouldBeSuccessfulRoles = new ArrayList<>(); List shouldFailRoles = new ArrayList<>(); final Client preferredClient = "t1".equals(preferredTribe) ? cluster1Client : cluster2Client; - // always ensure the index exists on all of the clusters in this test - assertAcked(internalClient().admin().indices().prepareCreate(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get()); - assertAcked(cluster2.getInstance(InternalClient.class).admin().indices() - .prepareCreate(IndexLifecycleManager.INTERNAL_SECURITY_INDEX).get()); for (int i = 0; i < randomRoles; i++) { final String rolename = "role" + i; @@ -432,10 +414,10 @@ public class SecurityTribeIT extends NativeRealmIntegTestCase { final String preferredTribe = randomBoolean() ? "t1" : "t2"; setupTribeNode(Settings.builder().put("tribe.on_conflict", "prefer_" + preferredTribe).build()); final int randomRoles = scaledRandomIntBetween(3, 8); - final Client cluster1Client = client(); - final Client cluster2Client = cluster2.client(); List shouldBeSuccessfulRoles = new ArrayList<>(); - final Client nonPreferredClient = "t1".equals(preferredTribe) ? cluster2Client : cluster1Client; + final InternalTestCluster nonPreferredCluster = "t1".equals(preferredTribe) ? cluster2 : internalCluster(); + ensureElasticPasswordBootstrapped(nonPreferredCluster); + Client nonPreferredClient = nonPreferredCluster.client(); for (int i = 0; i < randomRoles; i++) { final String rolename = "role" + i; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java new file mode 100644 index 00000000000..7c254723868 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security; + +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.cluster.metadata.TemplateUpgradeService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESIntegTestCase.ClusterScope; +import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.not; + +/** + * This test ensures that the plugin template upgrader can add and remove + * templates when security is enabled, as this requires certain + * system privileges + */ +@ClusterScope(maxNumDataNodes = 1, scope = Scope.SUITE, numClientNodes = 0) +public class TemplateUpgraderTests extends SecurityIntegTestCase { + + public void testTemplatesWorkAsExpected() throws Exception { + ClusterService clusterService = internalCluster().getInstance(ClusterService.class, internalCluster().getMasterName()); + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName()); + Client client = internalCluster().getInstance(Client.class, internalCluster().getMasterName()); + UnaryOperator<Map<String, IndexTemplateMetaData>> indexTemplateMetaDataUpgraders = map -> { + map.remove("removed-template"); + map.put("added-template", IndexTemplateMetaData.builder("added-template") + .order(1) + .patterns(Collections.singletonList(randomAlphaOfLength(10))).build()); + return map; + }; + + PutIndexTemplateResponse putIndexTemplateResponse = client().admin().indices().preparePutTemplate("removed-template") + .setOrder(1) + .setPatterns(Collections.singletonList(randomAlphaOfLength(10))) + .get(); + assertAcked(putIndexTemplateResponse); + assertTemplates("removed-template", "added-template"); + + TemplateUpgradeService templateUpgradeService = new TemplateUpgradeService(Settings.EMPTY, client, clusterService, threadPool, + Collections.singleton(indexTemplateMetaDataUpgraders)); + + // ensure the cluster listener gets triggered + ClusterChangedEvent event = new ClusterChangedEvent("testing", clusterService.state(), clusterService.state()); + templateUpgradeService.clusterChanged(event); + + assertBusy(() -> assertTemplates("added-template", "removed-template")); + } + + private void assertTemplates(String existingTemplate, String deletedTemplate) { + GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get(); + List<String> templateNames = response.getIndexTemplates().stream().map(IndexTemplateMetaData::name).collect(Collectors.toList()); + assertThat(templateNames, hasItem(existingTemplate)); + assertThat(templateNames, not(hasItem(deletedTemplate))); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index
65e96563636..a05c221a38b 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java
@@ -5,6 +5,18 @@
  */
 package org.elasticsearch.xpack.security.authc;
 
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.time.Clock;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchSecurityException;
@@ -46,18 +58,6 @@ import org.elasticsearch.xpack.security.user.User;
 import org.junit.After;
 import org.junit.Before;
 
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.time.Clock;
-import java.util.Arrays;
-import java.util.Base64;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.atomic.AtomicBoolean;
-
 import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException;
 import static org.elasticsearch.xpack.security.support.Exceptions.authenticationError;
 import static org.hamcrest.Matchers.arrayContaining;
@@ -215,7 +215,7 @@ public class AuthenticationServiceTests extends ESTestCase {
         }, this::logAndFail));
         verify(auditTrail).authenticationSuccess(secondRealm.name(), user, "_action", message);
         verifyNoMoreInteractions(auditTrail);
-        verify(firstRealm, never()).authenticate(eq(token), any(ActionListener.class), any(IncomingRequest.class));
+        verify(firstRealm, never()).authenticate(eq(token), any(ActionListener.class));
         assertTrue(completed.get());
     }
 
@@ -571,7 +571,7 @@ public class AuthenticationServiceTests extends ESTestCase {
         when(secondRealm.token(threadContext)).thenReturn(token);
         when(secondRealm.supports(token)).thenReturn(true);
         doThrow(authenticationError("realm doesn't like authenticate"))
-            .when(secondRealm).authenticate(eq(token), any(ActionListener.class), any(IncomingRequest.class));
+            .when(secondRealm).authenticate(eq(token), any(ActionListener.class));
         try {
             authenticateBlocking("_action", message, null);
             fail("exception should bubble out");
@@ -586,7 +586,7 @@ public class AuthenticationServiceTests extends ESTestCase {
         when(secondRealm.token(threadContext)).thenReturn(token);
         when(secondRealm.supports(token)).thenReturn(true);
         doThrow(authenticationError("realm doesn't like authenticate"))
-            .when(secondRealm).authenticate(eq(token), any(ActionListener.class), any(IncomingRequest.class));
+            .when(secondRealm).authenticate(eq(token), any(ActionListener.class));
         try {
             authenticateBlocking(restRequest);
             fail("exception should bubble out");
@@ -876,9 +876,13 @@ public class AuthenticationServiceTests extends ESTestCase {
     private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) {
         doAnswer((i) -> {
             ActionListener listener = (ActionListener) i.getArguments()[1];
-            listener.onResponse(user);
+            if (user == null) {
+                listener.onResponse(AuthenticationResult.notHandled());
+            } else {
+                listener.onResponse(AuthenticationResult.success(user));
+            }
             return null;
-        }).when(realm).authenticate(eq(token), any(ActionListener.class), any(IncomingRequest.class));
+        }).when(realm).authenticate(eq(token), any(ActionListener.class));
     }
 
     private Authentication authenticateBlocking(RestRequest restRequest) {
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java
new file mode 100644
index 00000000000..e68b709a280
--- /dev/null
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/InternalRealmsTests.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.authc;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.watcher.ResourceWatcherService;
+import org.elasticsearch.xpack.security.SecurityLifecycleService;
+import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
+import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
+import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
+import org.elasticsearch.xpack.ssl.SSLService;
+
+import java.util.Map;
+import java.util.function.BiConsumer;
+
+import static org.elasticsearch.mock.orig.Mockito.times;
+import static org.hamcrest.Matchers.any;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyZeroInteractions;
+
+public class InternalRealmsTests extends ESTestCase {
+
+    public void testNativeRealmRegistersIndexHealthChangeListener() throws Exception {
+        SecurityLifecycleService lifecycleService = mock(SecurityLifecycleService.class);
+        Map factories = InternalRealms.getFactories(mock(ThreadPool.class), mock(ResourceWatcherService.class),
+                mock(SSLService.class), mock(NativeUsersStore.class), mock(NativeRoleMappingStore.class), lifecycleService);
+        assertThat(factories, hasEntry(is(NativeRealm.TYPE), any(Realm.Factory.class)));
+        verifyZeroInteractions(lifecycleService);
+
+        Settings settings = Settings.builder().put("path.home", createTempDir()).build();
+        factories.get(NativeRealm.TYPE).create(new RealmConfig("test", Settings.EMPTY, settings, new ThreadContext(settings)));
+        verify(lifecycleService).addSecurityIndexHealthChangeListener(isA(BiConsumer.class));
+
+        factories.get(NativeRealm.TYPE).create(new RealmConfig("test", Settings.EMPTY, settings, new ThreadContext(settings)));
+        verify(lifecycleService, times(2)).addSecurityIndexHealthChangeListener(isA(BiConsumer.class));
+    }
+}
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java
index 66b42330a44..c8db4cce2eb 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmSettingsTests.java
@@ -134,7 +134,7 @@ public class RealmSettingsTests extends ESTestCase {
         if (userSearch) {
             builder.put("user_search.base_dn", "o=people, dc=example, dc=com");
             builder.put("user_search.scope", "sub_tree");
-            builder.put("user_search.attribute", randomAlphaOfLengthBetween(2, 5));
+            builder.put("user_search.filter", "(" + randomAlphaOfLengthBetween(2, 5) + "={0})");
             builder.put("user_search.pool.enabled", randomBoolean());
             builder.put("user_search.pool.size", randomIntBetween(10, 100));
             builder.put("user_search.pool.initial_size", randomIntBetween(1, 10));
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java
index c0ff17d6152..c977ecc2214 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/RealmsTests.java
@@ -9,14 +9,14 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.env.Environment;
-import org.elasticsearch.xpack.security.user.User;
-import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
-import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
-import org.elasticsearch.xpack.security.authc.file.FileRealm;
-import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.license.XPackLicenseState.AllowedRealmType;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.security.authc.esnative.NativeRealm;
+import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
+import org.elasticsearch.xpack.security.authc.file.FileRealm;
+import org.elasticsearch.xpack.security.authc.ldap.LdapRealm;
+import org.elasticsearch.xpack.security.user.User;
 import org.junit.Before;
 
 import java.util.ArrayList;
@@ -436,8 +436,8 @@ public class RealmsTests extends ESTestCase {
         }
 
         @Override
-        public void authenticate(AuthenticationToken token, ActionListener listener, IncomingRequest incomingRequest) {
-            listener.onResponse(null);
+        public void authenticate(AuthenticationToken token, ActionListener listener) {
+            listener.onResponse(AuthenticationResult.notHandled());
         }
 
         @Override
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java
index 6dae2948591..25ce51cd730 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java
@@ -40,11 +40,6 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
         useSSL = randomBoolean();
     }
 
-    @Override
-    public boolean shouldSetReservedUserPasswords() {
-        return false;
-    }
-
     @Override
     public Settings nodeSettings(int nodeOrdinal) {
         logger.info("--> use SSL? {}", useSSL);
@@ -62,6 +57,11 @@ public class ESNativeMigrateToolTests extends NativeRealmIntegTestCase {
         return useSSL == false;
     }
 
+    @Override
+    protected boolean shouldSetReservedUserPasswords() {
+        return false;
+    }
+
     private Environment nodeEnvironment() throws Exception {
         return internalCluster().getInstances(Environment.class).iterator().next();
     }
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java
new file mode 100644
index 00000000000..eb6b3de2f51
--- /dev/null
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmTests.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.authc.esnative;
+
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
+import org.elasticsearch.cluster.health.ClusterIndexHealth;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.security.authc.RealmConfig;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.elasticsearch.xpack.security.test.SecurityTestUtils.getClusterIndexHealth;
+import static org.mockito.Mockito.mock;
+
+public class NativeRealmTests extends ESTestCase {
+
+    public void testCacheClearOnIndexHealthChange() {
+        final AtomicInteger numInvalidation = new AtomicInteger(0);
+        int expectedInvalidation = 0;
+        Settings settings = Settings.builder().put("path.home", createTempDir()).build();
+        RealmConfig config = new RealmConfig("native", Settings.EMPTY, settings, new ThreadContext(settings));
+        final NativeRealm nativeRealm = new NativeRealm(config, mock(NativeUsersStore.class)) {
+            @Override
+            void clearCache() {
+                numInvalidation.incrementAndGet();
+            }
+        };
+
+        // existing to no longer present
+        ClusterIndexHealth previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
+        ClusterIndexHealth currentHealth = null;
+        nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth);
+        assertEquals(++expectedInvalidation, numInvalidation.get());
+
+        // doesn't exist to exists
+        previousHealth = null;
+        currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
+        nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth);
+        assertEquals(++expectedInvalidation, numInvalidation.get());
+
+        // green or yellow to red
+        previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
+        currentHealth = getClusterIndexHealth(ClusterHealthStatus.RED);
+        nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth);
+        assertEquals(expectedInvalidation, numInvalidation.get());
+
+        // red to non red
+        previousHealth = getClusterIndexHealth(ClusterHealthStatus.RED);
+        currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
+        nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth);
+        assertEquals(++expectedInvalidation, numInvalidation.get());
+
+        // green to yellow or yellow to green
+        previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW));
+        currentHealth = getClusterIndexHealth(
+                previousHealth.getStatus() == ClusterHealthStatus.GREEN ? ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN);
+        nativeRealm.onSecurityIndexHealthChange(previousHealth, currentHealth);
+        assertEquals(expectedInvalidation, numInvalidation.get());
+    }
+}
diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java
index 01e87e8a8c9..637903c935e 100644
--- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java
+++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java
@@ -17,7 +17,8 @@ import org.elasticsearch.env.Environment;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.XPackSettings;
 import org.elasticsearch.xpack.security.SecurityLifecycleService;
-import org.elasticsearch.xpack.security.authc.IncomingRequest;
+import org.elasticsearch.xpack.security.action.user.ChangePasswordRequest;
+import org.elasticsearch.xpack.security.authc.AuthenticationResult;
 import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo;
 import org.elasticsearch.xpack.security.authc.support.Hasher;
 import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken;
@@ -32,20 +33,16 @@ import org.mockito.ArgumentCaptor;
 
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.concurrent.ExecutionException;
 import java.util.function.Predicate;
 
-import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
 import static org.mockito.Matchers.any;
@@ -67,34 +64,16 @@ public class ReservedRealmTests extends ESTestCase {
     public static final String ACCEPT_DEFAULT_PASSWORDS = ReservedRealm.ACCEPT_DEFAULT_PASSWORD_SETTING.getKey();
     private NativeUsersStore usersStore;
     private SecurityLifecycleService securityLifecycleService;
-    private IncomingRequest incomingRequest;
 
     @Before
     public void setupMocks() throws Exception {
         usersStore = mock(NativeUsersStore.class);
         securityLifecycleService = mock(SecurityLifecycleService.class);
-        incomingRequest = mock(IncomingRequest.class);
         when(securityLifecycleService.isSecurityIndexAvailable()).thenReturn(true);
         when(securityLifecycleService.checkSecurityMappingVersion(any())).thenReturn(true);
         mockGetAllReservedUserInfo(usersStore, Collections.emptyMap());
     }
 
-    @SuppressForbidden(reason = "allow getting localhost")
-    public void testMappingVersionFromBeforeUserExisted() throws ExecutionException, InterruptedException, UnknownHostException {
-        when(securityLifecycleService.checkSecurityMappingVersion(any())).thenReturn(false);
-        final ReservedRealm reservedRealm =
-            new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
-                new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY));
-        final String principal = ElasticUser.NAME;
-
-
PlainActionFuture future = new PlainActionFuture<>(); - InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 100); - when(incomingRequest.getRemoteAddress()).thenReturn(address); - when(incomingRequest.getType()).thenReturn(IncomingRequest.RequestType.REST); - reservedRealm.authenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), future, incomingRequest); - assertThat(future.get().enabled(), equalTo(false)); - } - public void testDisableDefaultPasswordAuthentication() throws Throwable { final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); @@ -104,22 +83,12 @@ public class ReservedRealmTests extends ESTestCase { final ReservedRealm reservedRealm = new ReservedRealm(environment, settings, usersStore, anonymousUser, securityLifecycleService, new ThreadContext(Settings.EMPTY)); - final ActionListener listener = new ActionListener() { - @Override - public void onResponse(User user) { - fail("Authentication should have failed because default-password is not allowed"); - } - - @Override - public void onFailure(Exception e) { - assertThat(e, instanceOf(ElasticsearchSecurityException.class)); - assertThat(e.getMessage(), containsString("failed to authenticate")); - } - }; - reservedRealm.doAuthenticate(new UsernamePasswordToken(expected.principal(), EMPTY_PASSWORD), listener, incomingRequest); + PlainActionFuture listener = new PlainActionFuture<>(); + reservedRealm.doAuthenticate(new UsernamePasswordToken(expected.principal(), EMPTY_PASSWORD), listener); + assertFailedAuthentication(listener, expected.principal()); } - public void testElasticEmptyPasswordAuthenticationFailsFromNonLocalhost() throws Throwable { + public void testElasticEmptyPasswordAuthenticationFails() throws Throwable { final User expected = new ElasticUser(true); final String principal = expected.principal(); @@ -128,39 +97,10 @@ public class ReservedRealmTests extends ESTestCase { new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); - PlainActionFuture listener = new PlainActionFuture<>(); + PlainActionFuture listener = new PlainActionFuture<>(); - InetSocketAddress address = new InetSocketAddress(InetAddress.getByName("128.9.8.1"), 100); - - when(incomingRequest.getRemoteAddress()).thenReturn(address); - reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener, incomingRequest); - - ElasticsearchSecurityException actual = expectThrows(ElasticsearchSecurityException.class, listener::actionGet); - assertThat(actual.getMessage(), containsString("failed to authenticate user [" + principal)); - } - - @SuppressForbidden(reason = "allow getting localhost") - public void testElasticEmptyPasswordAuthenticationSucceedsInSetupModeIfRestRequestComesFromLocalhost() throws Throwable { - final User expected = new ElasticUser(true, true); - final String principal = expected.principal(); - - Settings settings = Settings.builder().put(ACCEPT_DEFAULT_PASSWORDS, true).build(); - final ReservedRealm reservedRealm = - new ReservedRealm(mock(Environment.class), settings, usersStore, - new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); - - PlainActionFuture listener = new PlainActionFuture<>(); - - InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 100); - - when(incomingRequest.getRemoteAddress()).thenReturn(address); - 
when(incomingRequest.getType()).thenReturn(IncomingRequest.RequestType.REST); - reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener, incomingRequest); - - User user = listener.actionGet(); - - assertEquals(expected, user); - assertNotEquals(new ElasticUser(true, false), user); + reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener); + assertFailedAuthentication(listener, expected.principal()); } public void testAuthenticationDisabled() throws Throwable { @@ -175,10 +115,11 @@ public class ReservedRealmTests extends ESTestCase { final User expected = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expected.principal(); - PlainActionFuture listener = new PlainActionFuture<>(); - reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener, mock(IncomingRequest.class)); - final User authenticated = listener.actionGet(); - assertNull(authenticated); + PlainActionFuture listener = new PlainActionFuture<>(); + reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener); + final AuthenticationResult result = listener.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.CONTINUE)); + assertNull(result.getUser()); verifyZeroInteractions(usersStore); } @@ -190,7 +131,7 @@ public class ReservedRealmTests extends ESTestCase { verifySuccessfulAuthentication(false); } - private void verifySuccessfulAuthentication(boolean enabled) { + private void verifySuccessfulAuthentication(boolean enabled) throws Exception { final Settings settings = Settings.builder().put(ACCEPT_DEFAULT_PASSWORDS, randomBoolean()).build(); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), securityLifecycleService, new ThreadContext(Settings.EMPTY)); @@ -205,10 +146,9 @@ public class ReservedRealmTests extends ESTestCase { }).when(usersStore).getReservedUserInfo(eq(principal), any(ActionListener.class)); // test empty password - final PlainActionFuture listener = new PlainActionFuture<>(); - reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener, incomingRequest); - ElasticsearchSecurityException expected = expectThrows(ElasticsearchSecurityException.class, listener::actionGet); - assertThat(expected.getMessage(), containsString("failed to authenticate user [" + principal)); + final PlainActionFuture listener = new PlainActionFuture<>(); + reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, EMPTY_PASSWORD), listener); + assertFailedAuthentication(listener, expectedUser.principal()); // the realm assumes it owns the hashed password so it fills it with 0's doAnswer((i) -> { @@ -218,9 +158,9 @@ public class ReservedRealmTests extends ESTestCase { }).when(usersStore).getReservedUserInfo(eq(principal), any(ActionListener.class)); // test new password - final PlainActionFuture authListener = new PlainActionFuture<>(); - reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, newPassword), authListener, incomingRequest); - final User authenticated = authListener.actionGet(); + final PlainActionFuture authListener = new PlainActionFuture<>(); + reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, newPassword), authListener); + final User authenticated = authListener.actionGet().getUser(); assertEquals(expectedUser, authenticated); 
assertThat(expectedUser.enabled(), is(enabled)); @@ -236,7 +176,7 @@ public class ReservedRealmTests extends ESTestCase { final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); - final User expectedUser = randomFrom(new ElasticUser(true, true), new KibanaUser(true), new LogstashSystemUser(true)); + final User expectedUser = randomFrom(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true)); final String principal = expectedUser.principal(); PlainActionFuture listener = new PlainActionFuture<>(); @@ -324,7 +264,7 @@ public class ReservedRealmTests extends ESTestCase { new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); PlainActionFuture> userFuture = new PlainActionFuture<>(); reservedRealm.users(userFuture); - assertThat(userFuture.actionGet(), containsInAnyOrder(new ElasticUser(true, true), new KibanaUser(true), + assertThat(userFuture.actionGet(), containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true), new BeatsSystemUser(true))); } @@ -346,31 +286,91 @@ public class ReservedRealmTests extends ESTestCase { } } - @SuppressForbidden(reason = "allow getting localhost") - public void testFailedAuthentication() throws UnknownHostException { + public void testFailedAuthentication() throws Exception { + when(securityLifecycleService.isSecurityIndexExisting()).thenReturn(true); + SecureString password = new SecureString("password".toCharArray()); + char[] hash = Hasher.BCRYPT.hash(password); + ReservedUserInfo userInfo = new ReservedUserInfo(hash, true, false); + mockGetAllReservedUserInfo(usersStore, Collections.singletonMap("elastic", userInfo)); final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); - InetSocketAddress address = new InetSocketAddress(InetAddress.getLocalHost(), 100); - // maybe cache a successful auth if (randomBoolean()) { - PlainActionFuture future = new PlainActionFuture<>(); + PlainActionFuture future = new PlainActionFuture<>(); - IncomingRequest r = mock(IncomingRequest.class); - when(r.getRemoteAddress()).thenReturn(address); - when(r.getType()).thenReturn(IncomingRequest.RequestType.REST); - reservedRealm.authenticate(new UsernamePasswordToken(ElasticUser.NAME, EMPTY_PASSWORD), future, r); - User user = future.actionGet(); - assertEquals(new ElasticUser(true, true), user); + reservedRealm.authenticate(new UsernamePasswordToken(ElasticUser.NAME, password), future); + User user = future.actionGet().getUser(); + assertEquals(new ElasticUser(true), user); } - PlainActionFuture future = new PlainActionFuture<>(); - IncomingRequest r = mock(IncomingRequest.class); - when(r.getRemoteAddress()).thenReturn(address); - when(r.getType()).thenReturn(IncomingRequest.RequestType.REST); - reservedRealm.authenticate(new UsernamePasswordToken(ElasticUser.NAME, new SecureString("foobar".toCharArray())), future, r); - ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, future::actionGet); - assertThat(e.getMessage(), containsString("failed to authenticate")); + PlainActionFuture future = new PlainActionFuture<>(); + reservedRealm.authenticate(new UsernamePasswordToken(ElasticUser.NAME, new SecureString("foobar".toCharArray())), future); + 
assertFailedAuthentication(future, ElasticUser.NAME); + } + + private void assertFailedAuthentication(PlainActionFuture future, String principal) throws Exception { + final AuthenticationResult result = future.get(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.TERMINATE)); + assertThat(result.getMessage(), containsString("failed to authenticate")); + assertThat(result.getMessage(), containsString(principal)); + } + + @SuppressWarnings("unchecked") + public void testBootstrapElasticPassword() { + ReservedUserInfo user = new ReservedUserInfo(ReservedRealm.EMPTY_PASSWORD_HASH, true, true); + mockGetAllReservedUserInfo(usersStore, Collections.singletonMap(ElasticUser.NAME, user)); + Settings settings = Settings.builder().build(); + when(securityLifecycleService.isSecurityIndexExisting()).thenReturn(true); + + final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, + new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + PlainActionFuture listenerFuture = new PlainActionFuture<>(); + SecureString passwordHash = new SecureString(randomAlphaOfLength(10).toCharArray()); + reservedRealm.bootstrapElasticUserCredentials(passwordHash, listenerFuture); + + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ChangePasswordRequest.class); + ArgumentCaptor listenerCaptor = ArgumentCaptor.forClass(ActionListener.class); + verify(usersStore).changePassword(requestCaptor.capture(), listenerCaptor.capture()); + assertEquals(passwordHash.getChars(), requestCaptor.getValue().passwordHash()); + + listenerCaptor.getValue().onResponse(null); + + assertTrue(listenerFuture.actionGet()); + } + + public void testBootstrapElasticPasswordNotSetIfPasswordExists() { + mockGetAllReservedUserInfo(usersStore, Collections.singletonMap(ElasticUser.NAME, new ReservedUserInfo(new char[7], true, false))); + when(securityLifecycleService.isSecurityIndexExisting()).thenReturn(true); + + Settings settings = Settings.builder().build(); + final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, + new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + SecureString passwordHash = new SecureString(randomAlphaOfLength(10).toCharArray()); + reservedRealm.bootstrapElasticUserCredentials(passwordHash, new PlainActionFuture<>()); + + verify(usersStore, times(0)).changePassword(any(ChangePasswordRequest.class), any()); + } + + public void testBootstrapElasticPasswordSettingFails() { + ReservedUserInfo user = new ReservedUserInfo(ReservedRealm.EMPTY_PASSWORD_HASH, true, true); + mockGetAllReservedUserInfo(usersStore, Collections.singletonMap(ElasticUser.NAME, user)); + Settings settings = Settings.builder().build(); + when(securityLifecycleService.isSecurityIndexExisting()).thenReturn(true); + + final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, + new AnonymousUser(Settings.EMPTY), securityLifecycleService, new ThreadContext(Settings.EMPTY)); + PlainActionFuture listenerFuture = new PlainActionFuture<>(); + SecureString passwordHash = new SecureString(randomAlphaOfLength(10).toCharArray()); + reservedRealm.bootstrapElasticUserCredentials(passwordHash, listenerFuture); + + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ChangePasswordRequest.class); + ArgumentCaptor listenerCaptor = ArgumentCaptor.forClass(ActionListener.class); + verify(usersStore).changePassword(requestCaptor.capture(), 
listenerCaptor.capture()); + assertEquals(passwordHash.getChars(), requestCaptor.getValue().passwordHash()); + + listenerCaptor.getValue().onFailure(new RuntimeException()); + + expectThrows(RuntimeException.class, listenerFuture::actionGet); } /* diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java index 1136ba53486..b6665881a0b 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java @@ -9,10 +9,12 @@ import org.elasticsearch.cli.Command; import org.elasticsearch.cli.CommandTestCase; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.user.BeatsSystemUser; import org.elasticsearch.xpack.security.user.ElasticUser; import org.elasticsearch.xpack.security.user.KibanaUser; @@ -23,24 +25,33 @@ import org.mockito.InOrder; import org.mockito.Mockito; import java.io.IOException; +import java.security.GeneralSecurityException; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.contains; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; public class SetupPasswordToolTests extends CommandTestCase { private final String pathHomeParameter = "-Epath.home=" + createTempDir(); + private SecureString bootstrapPassword = new SecureString("bootstrap-password".toCharArray()); private final String ep = "elastic-password"; private final String kp = "kibana-password"; private final String lp = "logstash-password"; private final String bp = "beats-password"; private CommandLineHttpClient httpClient; + private KeyStoreWrapper keyStore; @Before - public void setSecrets() { + public void setSecretsAndKeyStore() throws GeneralSecurityException { + this.keyStore = mock(KeyStoreWrapper.class); + this.httpClient = mock(CommandLineHttpClient.class); + when(keyStore.getString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey())).thenReturn(bootstrapPassword); + terminal.addSecretInput(ep); terminal.addSecretInput(ep); terminal.addSecretInput(kp); @@ -53,19 +64,20 @@ public class SetupPasswordToolTests extends CommandTestCase { @Override protected Command newCommand() { - this.httpClient = mock(CommandLineHttpClient.class); - return new SetupPasswordTool((e) -> httpClient); + return new SetupPasswordTool((e) -> httpClient, (e) -> keyStore); } public void testAutoSetup() throws Exception { execute("auto", pathHomeParameter, "-b", "true"); + verify(keyStore).decrypt(new char[0]); + ArgumentCaptor passwordCaptor = ArgumentCaptor.forClass(String.class); - SecureString defaultPassword = new SecureString("".toCharArray()); InOrder inOrder = Mockito.inOrder(httpClient); String elasticUrl = "http://localhost:9200/_xpack/security/user/elastic/_password"; - 
inOrder.verify(httpClient).postURL(eq("PUT"), eq(elasticUrl), eq(ElasticUser.NAME), eq(defaultPassword), passwordCaptor.capture()); + inOrder.verify(httpClient).postURL(eq("PUT"), eq(elasticUrl), eq(ElasticUser.NAME), eq(bootstrapPassword), + passwordCaptor.capture()); String[] users = {KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME}; SecureString newPassword = new SecureString(parsePassword(passwordCaptor.getValue()).toCharArray()); @@ -80,11 +92,11 @@ public class SetupPasswordToolTests extends CommandTestCase { execute("auto", pathHomeParameter, "-u", url, "-b"); ArgumentCaptor passwordCaptor = ArgumentCaptor.forClass(String.class); - SecureString defaultPassword = new SecureString("".toCharArray()); InOrder inOrder = Mockito.inOrder(httpClient); String elasticUrl = url + "/_xpack/security/user/elastic/_password"; - inOrder.verify(httpClient).postURL(eq("PUT"), eq(elasticUrl), eq(ElasticUser.NAME), eq(defaultPassword), passwordCaptor.capture()); + inOrder.verify(httpClient).postURL(eq("PUT"), eq(elasticUrl), eq(ElasticUser.NAME), eq(bootstrapPassword), + passwordCaptor.capture()); String[] users = {KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME}; SecureString newPassword = new SecureString(parsePassword(passwordCaptor.getValue()).toCharArray()); @@ -99,12 +111,10 @@ public class SetupPasswordToolTests extends CommandTestCase { execute("interactive", pathHomeParameter); - SecureString defaultPassword = new SecureString("".toCharArray()); - InOrder inOrder = Mockito.inOrder(httpClient); String elasticUrl = "http://localhost:9200/_xpack/security/user/elastic/_password"; SecureString newPassword = new SecureString(ep.toCharArray()); - inOrder.verify(httpClient).postURL(eq("PUT"), eq(elasticUrl), eq(ElasticUser.NAME), eq(defaultPassword), contains(ep)); + inOrder.verify(httpClient).postURL(eq("PUT"), eq(elasticUrl), eq(ElasticUser.NAME), eq(bootstrapPassword), contains(ep)); String kibanaUrl = "http://localhost:9200/_xpack/security/user/" + KibanaUser.NAME + "/_password"; inOrder.verify(httpClient).postURL(eq("PUT"), eq(kibanaUrl), eq(ElasticUser.NAME), eq(newPassword), contains(kp)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java index 5245812c4b8..2bf0a77ec65 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileRealmTests.java @@ -5,22 +5,24 @@ */ package org.elasticsearch.xpack.security.authc.file; +import java.util.Locale; +import java.util.Map; +import java.util.function.Supplier; + import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.Hasher; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.user.User; -import org.elasticsearch.test.ESTestCase; -import 
org.elasticsearch.watcher.ResourceWatcherService; import org.junit.Before; - -import java.util.Locale; -import java.util.Map; +import org.mockito.stubbing.Answer; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; @@ -29,6 +31,8 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -37,6 +41,12 @@ import static org.mockito.Mockito.when; public class FileRealmTests extends ESTestCase { + private static final Answer VERIFY_PASSWORD_ANSWER = inv -> { + assertThat(inv.getArguments().length, is(3)); + Supplier supplier = (Supplier) inv.getArguments()[2]; + return AuthenticationResult.success(supplier.get()); + }; + private FileUserPasswdStore userPasswdStore; private FileUserRolesStore userRolesStore; private Settings globalSettings; @@ -49,13 +59,16 @@ public class FileRealmTests extends ESTestCase { } public void testAuthenticate() throws Exception { - when(userPasswdStore.verifyPassword("user1", new SecureString("test123"))).thenReturn(true); + when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) + .thenAnswer(VERIFY_PASSWORD_ANSWER); when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" }); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = result.getUser(); assertThat(user, notNullValue()); assertThat(user.principal(), equalTo("user1")); assertThat(user.roles(), notNullValue()); @@ -68,15 +81,16 @@ public class FileRealmTests extends ESTestCase { .put("cache.hash_algo", Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT)) .build(); RealmConfig config = new RealmConfig("file-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - when(userPasswdStore.verifyPassword("user1", new SecureString("test123"))).thenReturn(true); + when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) + .thenAnswer(VERIFY_PASSWORD_ANSWER); when(userRolesStore.roles("user1")).thenReturn(new String[]{"role1", "role2"}); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user1 = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + 
User user1 = future.actionGet().getUser(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user2 = future.actionGet(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user2 = future.actionGet().getUser(); assertThat(user1, sameInstance(user2)); } @@ -84,43 +98,45 @@ public class FileRealmTests extends ESTestCase { RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); userPasswdStore = spy(new UserPasswdStore(config)); userRolesStore = spy(new UserRolesStore(config)); - doReturn(true).when(userPasswdStore).verifyPassword("user1", new SecureString("test123")); + when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) + .thenAnswer(VERIFY_PASSWORD_ANSWER); doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1"); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user1 = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user1 = future.actionGet().getUser(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user2 = future.actionGet(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user2 = future.actionGet().getUser(); assertThat(user1, sameInstance(user2)); userPasswdStore.notifyRefresh(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user3 = future.actionGet(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user3 = future.actionGet().getUser(); assertThat(user2, not(sameInstance(user3))); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user4 = future.actionGet(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user4 = future.actionGet().getUser(); assertThat(user3, sameInstance(user4)); userRolesStore.notifyRefresh(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user5 = future.actionGet(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user5 = future.actionGet().getUser(); assertThat(user4, not(sameInstance(user5))); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future, mock(IncomingRequest.class)); - User user6 = future.actionGet(); + realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); + User user6 = future.actionGet().getUser(); assertThat(user5, sameInstance(user6)); } public void testToken() throws Exception { RealmConfig 
config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - when(userPasswdStore.verifyPassword("user1", new SecureString("test123"))).thenReturn(true); + when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) + .thenAnswer(VERIFY_PASSWORD_ANSWER); when(userRolesStore.roles("user1")).thenReturn(new String[]{"role1", "role2"}); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java index 424661ef513..2a5c72b875d 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/file/FileUserPasswdStoreTests.java @@ -5,22 +5,6 @@ */ package org.elasticsearch.xpack.security.authc.file; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; -import org.elasticsearch.xpack.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.authc.support.Hasher; -import org.elasticsearch.xpack.XPackPlugin; -import org.junit.After; -import org.junit.Before; - import java.io.BufferedWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -34,6 +18,24 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.XPackPlugin; +import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; +import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.elasticsearch.xpack.security.authc.support.Hasher; +import org.elasticsearch.xpack.security.user.User; +import org.junit.After; +import org.junit.Before; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -91,8 +93,11 @@ public class FileUserPasswdStoreTests extends ESTestCase { FileUserPasswdStore store = new FileUserPasswdStore(config, watcherService, latch::countDown); + User user = new User("bcrypt"); assertThat(store.userExists("bcrypt"), is(true)); - assertThat(store.verifyPassword("bcrypt", new SecureString("test123")), is(true)); + AuthenticationResult result = store.verifyPassword("bcrypt", new SecureString("test123"), () -> user); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + assertThat(result.getUser(), 
is(user)); watcherService.start(); @@ -106,7 +111,9 @@ public class FileUserPasswdStoreTests extends ESTestCase { } assertThat(store.userExists("foobar"), is(true)); - assertThat(store.verifyPassword("foobar", new SecureString("barfoo")), is(true)); + result = store.verifyPassword("foobar", new SecureString("barfoo"), () -> user); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + assertThat(result.getUser(), is(user)); } public void testStore_AutoReload_WithParseFailures() throws Exception { @@ -126,7 +133,10 @@ public class FileUserPasswdStoreTests extends ESTestCase { FileUserPasswdStore store = new FileUserPasswdStore(config, watcherService, latch::countDown); - assertTrue(store.verifyPassword("bcrypt", new SecureString("test123"))); + User user = new User("bcrypt"); + final AuthenticationResult result = store.verifyPassword("bcrypt", new SecureString("test123"), () -> user); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + assertThat(result.getUser(), is(user)); watcherService.start(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java index 256d474fdc9..deba662dec0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java @@ -29,10 +29,11 @@ public class ActiveDirectoryGroupsResolverTests extends GroupsResolverTestCase { public void testResolveSubTree() throws Exception { Settings settings = Settings.builder() - .put("scope", LdapSearchScope.SUB_TREE) + .put("group_search.scope", LdapSearchScope.SUB_TREE) + .put("group_search.base_dn", "DC=ad,DC=test,DC=elasticsearch,DC=com") + .put("domain_name", "ad.test.elasticsearch.com") .build(); - ActiveDirectoryGroupsResolver resolver = new ActiveDirectoryGroupsResolver(settings, - "DC=ad,DC=test,DC=elasticsearch,DC=com", false); + ActiveDirectoryGroupsResolver resolver = new ActiveDirectoryGroupsResolver(settings); List groups = resolveBlocking(resolver, ldapConnection, BRUCE_BANNER_DN, TimeValue.timeValueSeconds(10), NoOpLogger.INSTANCE, null); assertThat(groups, containsInAnyOrder( @@ -48,10 +49,10 @@ public class ActiveDirectoryGroupsResolverTests extends GroupsResolverTestCase { public void testResolveOneLevel() throws Exception { Settings settings = Settings.builder() .put("scope", LdapSearchScope.ONE_LEVEL) - .put("base_dn", "CN=Builtin, DC=ad, DC=test, DC=elasticsearch,DC=com") + .put("group_search.base_dn", "CN=Builtin, DC=ad, DC=test, DC=elasticsearch,DC=com") + .put("domain_name", "ad.test.elasticsearch.com") .build(); - ActiveDirectoryGroupsResolver resolver = new ActiveDirectoryGroupsResolver(settings, - "DC=ad,DC=test,DC=elasticsearch,DC=com", false); + ActiveDirectoryGroupsResolver resolver = new ActiveDirectoryGroupsResolver(settings); List groups = resolveBlocking(resolver, ldapConnection, BRUCE_BANNER_DN, TimeValue.timeValueSeconds(10), NoOpLogger.INSTANCE, null); assertThat(groups, hasItem(containsString("Users"))); @@ -59,11 +60,11 @@ public class ActiveDirectoryGroupsResolverTests extends GroupsResolverTestCase { public void testResolveBaseLevel() throws Exception { Settings settings = Settings.builder() - .put("scope", LdapSearchScope.BASE) - .put("base_dn", "CN=Users, CN=Builtin, DC=ad, DC=test, DC=elasticsearch, DC=com") + 
.put("group_search.scope", LdapSearchScope.BASE) + .put("group_search.base_dn", "CN=Users, CN=Builtin, DC=ad, DC=test, DC=elasticsearch, DC=com") + .put("domain_name", "ad.test.elasticsearch.com") .build(); - ActiveDirectoryGroupsResolver resolver = new ActiveDirectoryGroupsResolver(settings, - "DC=ad,DC=test,DC=elasticsearch,DC=com", false); + ActiveDirectoryGroupsResolver resolver = new ActiveDirectoryGroupsResolver(settings); List groups = resolveBlocking(resolver, ldapConnection, BRUCE_BANNER_DN, TimeValue.timeValueSeconds(10), NoOpLogger.INSTANCE, null); assertThat(groups, hasItem(containsString("CN=Users,CN=Builtin"))); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java index 56fb5df3c0f..dce6fc075d9 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactory.DownLevelADAuthenticator; import org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactory.UpnADAuthenticator; import org.elasticsearch.xpack.security.user.User; @@ -136,12 +136,14 @@ public class ActiveDirectoryRealmTests extends ESTestCase { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateUserPrincipleName", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + final User user = result.getUser(); assertThat(user, is(notNullValue())); assertThat(user.roles(), arrayContaining(containsString("Avengers"))); } @@ -150,13 +152,13 @@ public class ActiveDirectoryRealmTests extends ESTestCase { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateSAMAccountName", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, 
resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); // Thor does not have a UPN of form CN=Thor@ad.test.elasticsearch.com - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=Thor", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=Thor", new SecureString(PASSWORD)), future); + User user = future.actionGet().getUser(); assertThat(user, is(notNullValue())); assertThat(user.roles(), arrayContaining(containsString("Avengers"))); } @@ -170,17 +172,17 @@ public class ActiveDirectoryRealmTests extends ESTestCase { return urls.toArray(Strings.EMPTY_ARRAY); } - public void testAuthenticateCachesSuccesfulAuthentications() throws Exception { + public void testAuthenticateCachesSuccessfulAuthentications() throws Exception { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateCachesSuccesfulAuthentications", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, sslService)); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); int count = randomIntBetween(2, 10); for (int i = 0; i < count; i++) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future); future.actionGet(); } @@ -192,13 +194,13 @@ public class ActiveDirectoryRealmTests extends ESTestCase { Settings settings = settings(Settings.builder().put(CachingUsernamePasswordRealm.CACHE_TTL_SETTING.getKey(), -1).build()); RealmConfig config = new RealmConfig("testAuthenticateCachingCanBeDisabled", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, sslService)); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); int count = randomIntBetween(2, 10); for (int i = 0; i < count; i++) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future); future.actionGet(); } @@ -210,13 +212,13 @@ public class ActiveDirectoryRealmTests extends ESTestCase { Settings settings = settings(); RealmConfig config = new RealmConfig("testAuthenticateCachingClearsCacheOnRoleMapperRefresh", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); 
ActiveDirectorySessionFactory sessionFactory = spy(new ActiveDirectorySessionFactory(config, sslService)); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); int count = randomIntBetween(2, 10); for (int i = 0; i < count; i++) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future); future.actionGet(); } @@ -227,8 +229,8 @@ public class ActiveDirectoryRealmTests extends ESTestCase { roleMapper.notifyRefresh(); for (int i = 0; i < count; i++) { - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future); future.actionGet(); } @@ -241,12 +243,12 @@ public class ActiveDirectoryRealmTests extends ESTestCase { .build()); RealmConfig config = new RealmConfig("testRealmMapsGroupsToRoles", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=ironman", new SecureString(PASSWORD)), future); + User user = future.actionGet().getUser(); assertThat(user, is(notNullValue())); assertThat(user.roles(), arrayContaining(equalTo("group_role"))); } @@ -257,12 +259,12 @@ public class ActiveDirectoryRealmTests extends ESTestCase { .build()); RealmConfig config = new RealmConfig("testRealmMapsGroupsToRoles", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("CN=Thor", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("CN=Thor", new SecureString(PASSWORD)), future); + User user = future.actionGet().getUser(); assertThat(user, is(notNullValue())); 
assertThat(user.roles(), arrayContainingInAnyOrder(equalTo("group_role"), equalTo("user_role"))); } @@ -276,7 +278,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { RealmConfig config = new RealmConfig("testRealmUsageStats", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); - DnRoleMapper roleMapper = new DnRoleMapper(LdapRealm.AD_TYPE, config, resourceWatcherService); + DnRoleMapper roleMapper = new DnRoleMapper(config, resourceWatcherService); LdapRealm realm = new LdapRealm(LdapRealm.AD_TYPE, config, sessionFactory, roleMapper, threadPool); Map stats = realm.usageStats(); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 694beccd985..0fc2d7ad417 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.ssl.VerificationMode; +import java.util.Arrays; import java.util.List; import java.util.concurrent.ExecutionException; @@ -46,43 +47,45 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false), globalSettings, new ThreadContext(Settings.EMPTY)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, - sslService); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String userName = "ironman"; - try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { - List groups = groups(ldap); - assertThat(groups, containsInAnyOrder( - containsString("Geniuses"), - containsString("Billionaire"), - containsString("Playboy"), - containsString("Philanthropists"), - containsString("Avengers"), - containsString("SHIELD"), - containsString("CN=Users,CN=Builtin"), - containsString("Domain Users"), - containsString("Supers"))); + String userName = "ironman"; + try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + List groups = groups(ldap); + assertThat(groups, containsInAnyOrder( + containsString("Geniuses"), + containsString("Billionaire"), + containsString("Playboy"), + containsString("Philanthropists"), + containsString("Avengers"), + containsString("SHIELD"), + containsString("CN=Users,CN=Builtin"), + containsString("Domain Users"), + containsString("Supers"))); + } } } public void testNetbiosAuth() throws Exception { final String adUrl = randomFrom("ldap://54.213.145.20:3268", "ldaps://54.213.145.20:3269", AD_LDAP_URL); - RealmConfig config = new RealmConfig("ad-test", buildAdSettings(adUrl, AD_DOMAIN, false), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = new RealmConfig("ad-test", buildAdSettings(adUrl, AD_DOMAIN, false), globalSettings, + new Environment(globalSettings), new 
ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String userName = "ades\\ironman"; - try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { - List groups = groups(ldap); - assertThat(groups, containsInAnyOrder( - containsString("Geniuses"), - containsString("Billionaire"), - containsString("Playboy"), - containsString("Philanthropists"), - containsString("Avengers"), - containsString("SHIELD"), - containsString("CN=Users,CN=Builtin"), - containsString("Domain Users"), - containsString("Supers"))); + String userName = "ades\\ironman"; + try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + List groups = groups(ldap); + assertThat(groups, containsInAnyOrder( + containsString("Geniuses"), + containsString("Billionaire"), + containsString("Playboy"), + containsString("Philanthropists"), + containsString("Avengers"), + containsString("SHIELD"), + containsString("CN=Users,CN=Builtin"), + containsString("Domain Users"), + containsString("Supers"))); + } } } @@ -94,23 +97,27 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI .put("ssl.verification_mode", VerificationMode.CERTIFICATE) .put(SessionFactory.TIMEOUT_TCP_READ_SETTING, "1ms") .build(); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - PlainActionFuture> groups = new PlainActionFuture<>(); - session(sessionFactory, "ironman", SECURED_PASSWORD).groups(groups); - LDAPException expected = expectThrows(LDAPException.class, groups::actionGet); - assertThat(expected.getMessage(), containsString("A client-side timeout was encountered while waiting")); + PlainActionFuture> groups = new PlainActionFuture<>(); + session(sessionFactory, "ironman", SECURED_PASSWORD).groups(groups); + LDAPException expected = expectThrows(LDAPException.class, groups::actionGet); + assertThat(expected.getMessage(), containsString("A client-side timeout was encountered while waiting")); + } } public void testAdAuthAvengers() throws Exception { - RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false), globalSettings, + new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String[] users = new String[]{"cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow", }; - for(String user: users) { - try (LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) { - assertThat("group avenger test for user "+user, groups(ldap), hasItem(containsString("Avengers"))); + String[] users = new String[]{"cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow"}; + for (String user : users) { + try 
(LdapSession ldap = session(sessionFactory, user, SECURED_PASSWORD)) { + assertThat("group avenger test for user " + user, groups(ldap), hasItem(containsString("Avengers"))); + } } } } @@ -119,21 +126,23 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI public void testAuthenticate() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String userName = "hulk"; - try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { - List groups = groups(ldap); + String userName = "hulk"; + try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + List groups = groups(ldap); - assertThat(groups, containsInAnyOrder( - containsString("Avengers"), - containsString("SHIELD"), - containsString("Geniuses"), - containsString("Philanthropists"), - containsString("CN=Users,CN=Builtin"), - containsString("Domain Users"), - containsString("Supers"))); + assertThat(groups, containsInAnyOrder( + containsString("Avengers"), + containsString("SHIELD"), + containsString("Geniuses"), + containsString("Philanthropists"), + containsString("CN=Users,CN=Builtin"), + containsString("Domain Users"), + containsString("Supers"))); + } } } @@ -141,21 +150,23 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI public void testAuthenticateBaseUserSearch() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Bruce Banner, CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.BASE, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String userName = "hulk"; - try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { - List groups = groups(ldap); + String userName = "hulk"; + try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + List groups = groups(ldap); - assertThat(groups, containsInAnyOrder( - containsString("Avengers"), - containsString("SHIELD"), - containsString("Geniuses"), - containsString("Philanthropists"), - containsString("CN=Users,CN=Builtin"), - containsString("Domain Users"), - containsString("Supers"))); + assertThat(groups, containsInAnyOrder( + containsString("Avengers"), + containsString("SHIELD"), + containsString("Geniuses"), + containsString("Philanthropists"), + containsString("CN=Users,CN=Builtin"), + containsString("Domain Users"), + containsString("Supers"))); + } } } @@ -167,14 +178,16 @@ public class ActiveDirectorySessionFactoryTests extends 
AbstractActiveDirectoryI "CN=Avengers,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com") .put(ActiveDirectorySessionFactory.AD_GROUP_SEARCH_SCOPE_SETTING, LdapSearchScope.BASE) .build(); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String userName = "hulk"; - try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { - List groups = groups(ldap); + String userName = "hulk"; + try (LdapSession ldap = session(sessionFactory, userName, SECURED_PASSWORD)) { + List groups = groups(ldap); - assertThat(groups, hasItem(containsString("Avengers"))); + assertThat(groups, hasItem(containsString("Avengers"))); + } } } @@ -182,37 +195,41 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI public void testAuthenticateWithUserPrincipalName() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - //Login with the UserPrincipalName - String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; - try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) { - List groups = groups(ldap); - assertThat(ldap.userDn(), is(userDN)); - assertThat(groups, containsInAnyOrder( - containsString("Geniuses"), - containsString("CN=Users,CN=Builtin"), - containsString("Domain Users"))); + //Login with the UserPrincipalName + String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; + try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) { + List groups = groups(ldap); + assertThat(ldap.userDn(), is(userDN)); + assertThat(groups, containsInAnyOrder( + containsString("Geniuses"), + containsString("CN=Users,CN=Builtin"), + containsString("Domain Users"))); + } } } public void testAuthenticateWithSAMAccountName() throws Exception { Settings settings = buildAdSettings(AD_LDAP_URL, AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, false); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - //login with sAMAccountName - String userDN = 
"CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; - try (LdapSession ldap = session(sessionFactory, "selvig", SECURED_PASSWORD)) { - assertThat(ldap.userDn(), is(userDN)); + //login with sAMAccountName + String userDN = "CN=Erik Selvig,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; + try (LdapSession ldap = session(sessionFactory, "selvig", SECURED_PASSWORD)) { + assertThat(ldap.userDn(), is(userDN)); - List groups = groups(ldap); - assertThat(groups, containsInAnyOrder( - containsString("Geniuses"), - containsString("CN=Users,CN=Builtin"), - containsString("Domain Users"))); + List groups = groups(ldap); + assertThat(groups, containsInAnyOrder( + containsString("Geniuses"), + containsString("CN=Users,CN=Builtin"), + containsString("Domain Users"))); + } } } @@ -224,16 +241,18 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI .put(ActiveDirectorySessionFactory.AD_USER_SEARCH_FILTER_SETTING, "(&(objectclass=user)(userPrincipalName={0}@ad.test.elasticsearch.com))") .build(); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - //Login with the UserPrincipalName - try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) { - List groups = groups(ldap); - assertThat(groups, containsInAnyOrder( - containsString("CN=Geniuses"), - containsString("CN=Domain Users"), - containsString("CN=Users,CN=Builtin"))); + //Login with the UserPrincipalName + try (LdapSession ldap = session(sessionFactory, "erik.selvig", SECURED_PASSWORD)) { + List groups = groups(ldap); + assertThat(groups, containsInAnyOrder( + containsString("CN=Geniuses"), + containsString("CN=Domain Users"), + containsString("CN=Users,CN=Builtin"))); + } } } @@ -256,7 +275,8 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI .put("ssl.truststore.password", "changeit") .build(); } - RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); String user = "Bruce Banner"; @@ -290,7 +310,8 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI .put("ssl.truststore.password", "changeit") .build(); } - RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); String user = "Bruce Banner"; @@ -318,7 +339,8 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI .put("ssl.truststore.password", "changeit") .build(); } - RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, 
globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); String user = "Bruce Banner"; @@ -334,16 +356,19 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI } public void testAdAuthWithHostnameVerification() throws Exception { - RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, true), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); - ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService); + RealmConfig config = new RealmConfig("ad-test", buildAdSettings(AD_LDAP_URL, AD_DOMAIN, true), globalSettings, + new Environment(globalSettings), new ThreadContext(globalSettings)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService)) { - String userName = "ironman"; - UncategorizedExecutionException e = expectThrows(UncategorizedExecutionException.class, - () -> session(sessionFactory, userName, SECURED_PASSWORD)); - assertThat(e.getCause(), instanceOf(ExecutionException.class)); - assertThat(e.getCause().getCause(), instanceOf(LDAPException.class)); - final LDAPException expected = (LDAPException) e.getCause().getCause(); - assertThat(expected.getMessage(), anyOf(containsString("Hostname verification failed"), containsString("peer not authenticated"))); + String userName = "ironman"; + UncategorizedExecutionException e = expectThrows(UncategorizedExecutionException.class, + () -> session(sessionFactory, userName, SECURED_PASSWORD)); + assertThat(e.getCause(), instanceOf(ExecutionException.class)); + assertThat(e.getCause().getCause(), instanceOf(LDAPException.class)); + final LDAPException expected = (LDAPException) e.getCause().getCause(); + assertThat(expected.getMessage(), + anyOf(containsString("Hostname verification failed"), containsString("peer not authenticated"))); + } } public void testStandardLdapHostnameVerification() throws Exception { @@ -353,7 +378,8 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI .put(LdapTestCase.buildLdapSettings(AD_LDAP_URL, userTemplate, groupSearchBase, LdapSearchScope.SUB_TREE)) .put("ssl.verification_mode", VerificationMode.FULL) .build(); - RealmConfig config = new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + RealmConfig config = + new RealmConfig("ad-test", settings, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService); String user = "Bruce Banner"; @@ -365,7 +391,30 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI assertThat(expected.getMessage(), anyOf(containsString("Hostname verification failed"), containsString("peer not authenticated"))); } - Settings buildAdSettings(String ldapUrl, String adDomainName, boolean hostnameVerification) { + public void testADLookup() throws Exception { + RealmConfig config = new RealmConfig("ad-test", + buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false, true), + globalSettings, new ThreadContext(Settings.EMPTY)); + try (ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, 
sslService)) { + + List users = randomSubsetOf(Arrays.asList("cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow", + "cap@ad.test.elasticsearch.com", "hawkeye@ad.test.elasticsearch.com", "hulk@ad.test.elasticsearch.com", + "ironman@ad.test.elasticsearch.com", "thor@ad.test.elasticsearch.com", "blackwidow@ad.test.elasticsearch.com", + "ADES\\cap", "ADES\\hawkeye", "ADES\\hulk", "ADES\\ironman", "ADES\\thor", "ADES\\blackwidow")); + for (String user : users) { + try (LdapSession ldap = unauthenticatedSession(sessionFactory, user)) { + assertNotNull("ldap session was null for user " + user, ldap); + assertThat("group avenger test for user " + user, groups(ldap), hasItem(containsString("Avengers"))); + } + } + } + } + + private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean hostnameVerification) { + return buildAdSettings(ldapUrl, adDomainName, hostnameVerification, randomBoolean()); + } + + private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean hostnameVerification, boolean useBindUser) { Settings.Builder builder = Settings.builder() .put(ActiveDirectorySessionFactory.URLS_SETTING, ldapUrl) .put(ActiveDirectorySessionFactory.AD_DOMAIN_NAME_SETTING, adDomainName); @@ -374,10 +423,23 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI } else { builder.put(ActiveDirectorySessionFactory.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); } + if (useGlobalSSL == false) { builder.put("ssl.truststore.path", getDataPath("../ldap/support/ldaptrust.jks")) .put("ssl.truststore.password", "changeit"); } + + if (useBindUser) { + final String user = randomFrom("cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow", "cap@ad.test.elasticsearch.com", + "hawkeye@ad.test.elasticsearch.com", "hulk@ad.test.elasticsearch.com", "ironman@ad.test.elasticsearch.com", + "thor@ad.test.elasticsearch.com", "blackwidow@ad.test.elasticsearch.com", "ADES\\cap", "ADES\\hawkeye", "ADES\\hulk", + "ADES\\ironman", "ADES\\thor", "ADES\\blackwidow", "CN=Bruce Banner,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"); + final boolean poolingEnabled = randomBoolean(); + builder.put("bind_dn", user) + .put("bind_password", PASSWORD) + .put("user_search.pool.enabled", poolingEnabled); + logger.info("using bind user [{}] with pooling enabled [{}]", user, poolingEnabled); + } return builder.build(); } @@ -387,6 +449,12 @@ public class ActiveDirectorySessionFactoryTests extends AbstractActiveDirectoryI return future.actionGet(); } + private LdapSession unauthenticatedSession(SessionFactory factory, String username) { + PlainActionFuture future = new PlainActionFuture<>(); + factory.unauthenticatedSession(username, future); + return future.actionGet(); + } + private List groups(LdapSession ldapSession) { PlainActionFuture> future = new PlainActionFuture<>(); ldapSession.groups(future); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java index 787cae5e295..c1074d55f36 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java @@ -12,7 +12,10 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; -import 
org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.security.authc.ldap.support.LdapTestCase; @@ -21,9 +24,6 @@ import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRea import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.security.user.User; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.ssl.SSLService; import org.elasticsearch.xpack.ssl.VerificationMode; import org.junit.After; @@ -45,7 +45,6 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -86,9 +85,11 @@ public class LdapRealmTests extends LdapTestCase { LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = result.getUser(); assertThat(user, notNullValue()); assertThat(user.roles(), arrayContaining("HMS Victory")); assertThat(user.metadata(), notNullValue()); @@ -109,9 +110,11 @@ public class LdapRealmTests extends LdapTestCase { LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = result.getUser(); assertThat(user, notNullValue()); assertThat("For roles " + Arrays.toString(user.roles()), user.roles(), arrayContaining("HMS Victory")); assertThat(user.metadata(), notNullValue()); @@ -132,12 +135,14 @@ public class LdapRealmTests extends LdapTestCase { ldapFactory = spy(ldapFactory); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - 
ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - future.actionGet(); + + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); + assertThat(future.actionGet().getStatus(), is(AuthenticationResult.Status.SUCCESS)); + future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - future.actionGet(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); + assertThat(future.actionGet().getStatus(), is(AuthenticationResult.Status.SUCCESS)); //verify one and only one session -> caching is working verify(ldapFactory, times(1)).session(anyString(), any(SecureString.class), any(ActionListener.class)); @@ -155,11 +160,11 @@ public class LdapRealmTests extends LdapTestCase { DnRoleMapper roleMapper = buildGroupAsRoleMapper(resourceWatcherService); ldapFactory = spy(ldapFactory); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, roleMapper, threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); future.actionGet(); future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); future.actionGet(); //verify one and only one session -> caching is working @@ -168,7 +173,7 @@ public class LdapRealmTests extends LdapTestCase { roleMapper.notifyRefresh(); future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); future.actionGet(); //we need to session again @@ -188,11 +193,11 @@ public class LdapRealmTests extends LdapTestCase { ldapFactory = spy(ldapFactory); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); future.actionGet(); future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); future.actionGet(); //verify two and only two binds -> caching is disabled @@ -230,7 +235,7 @@ public class LdapRealmTests extends LdapTestCase { try { assertThat(sessionFactory, is(instanceOf(LdapUserSearchSessionFactory.class))); } finally { - ((LdapUserSearchSessionFactory)sessionFactory).shutdown(); + ((LdapUserSearchSessionFactory)sessionFactory).close(); } } @@ 
-280,11 +285,13 @@ public class LdapRealmTests extends LdapTestCase { LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, - new DnRoleMapper(LdapRealm.LDAP_TYPE, config, resourceWatcherService), threadPool); + new DnRoleMapper(config, resourceWatcherService), threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken("Horatio Hornblower", new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken("Horatio Hornblower", new SecureString(PASSWORD)), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = result.getUser(); assertThat(user, notNullValue()); assertThat(user.roles(), arrayContaining("avenger")); } @@ -306,10 +313,14 @@ public class LdapRealmTests extends LdapTestCase { LdapRealm ldap = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool); - PlainActionFuture future = new PlainActionFuture<>(); - ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future, mock(IncomingRequest.class)); - User user = future.actionGet(); - assertThat(user, nullValue()); + PlainActionFuture future = new PlainActionFuture<>(); + ldap.authenticate(new UsernamePasswordToken(VALID_USERNAME, new SecureString(PASSWORD)), future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.CONTINUE)); + assertThat(result.getUser(), nullValue()); + assertThat(result.getMessage(), is("authenticate failed")); + assertThat(result.getException(), notNullValue()); + assertThat(result.getException().getMessage(), containsString("UnknownHostException")); } public void testUsageStats() throws Exception { @@ -335,7 +346,7 @@ public class LdapRealmTests extends LdapTestCase { LdapSessionFactory ldapFactory = new LdapSessionFactory(config, sslService); LdapRealm realm = new LdapRealm(LdapRealm.LDAP_TYPE, config, ldapFactory, - new DnRoleMapper(LdapRealm.LDAP_TYPE, config, resourceWatcherService), threadPool); + new DnRoleMapper(config, resourceWatcherService), threadPool); Map stats = realm.usageStats(); assertThat(stats, is(notNullValue())); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index cf41ff9da4d..2b52e6f0f0c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -43,7 +44,6 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static 
org.hamcrest.Matchers.isEmptyString; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; public class LdapUserSearchSessionFactoryTests extends LdapTestCase { @@ -75,20 +75,31 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { } public void testSupportsUnauthenticatedSessions() throws Exception { - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, "", LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", "") - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.attribute", "cn") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + final boolean useAttribute = randomBoolean(); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, "", LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", "") + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.pool.enabled", randomBoolean()); + if (useAttribute) { + builder.put("user_search.attribute", "cn"); + } else { + builder.put("user_search.filter", "(cn={0})"); + } + + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); try { assertThat(sessionFactory.supportsUnauthenticatedSession(), is(true)); } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if (useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -96,14 +107,20 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "o=sevenSeas"; - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", userSearchBase) - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.attribute", "cn") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + final boolean useAttribute = randomBoolean(); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", userSearchBase) + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.pool.enabled", randomBoolean()); + if (useAttribute) { + builder.put("user_search.attribute", "cn"); + } else { + builder.put("user_search.filter", "(cn={0})"); + } + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -123,7 +140,11 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertThat(dn, containsString(user)); } } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if 
(useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -131,15 +152,21 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "o=sevenSeas"; - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", userSearchBase) - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.scope", LdapSearchScope.BASE) - .put("user_search.attribute", "cn") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + final boolean useAttribute = randomBoolean(); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", userSearchBase) + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.scope", LdapSearchScope.BASE) + .put("user_search.pool.enabled", randomBoolean()); + if (useAttribute) { + builder.put("user_search.attribute", "cn"); + } else { + builder.put("user_search.filter", "(cn={0})"); + } + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -150,7 +177,11 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertNull(session(sessionFactory, user, userPass)); assertNull(unauthenticatedSession(sessionFactory, user)); } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if (useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -158,15 +189,21 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "cn=William Bush,ou=people,o=sevenSeas"; - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", userSearchBase) - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.scope", LdapSearchScope.BASE) - .put("user_search.attribute", "cn") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", userSearchBase) + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.scope", LdapSearchScope.BASE) + .put("user_search.pool.enabled", randomBoolean()); + final boolean useAttribute = randomBoolean(); + if (useAttribute) { + builder.put("user_search.attribute", "cn"); + } else { + builder.put("user_search.filter", "(cn={0})"); + } + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new 
ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -186,7 +223,11 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertThat(dn, containsString(user)); } } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if (useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -194,15 +235,21 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "o=sevenSeas"; - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", userSearchBase) - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.scope", LdapSearchScope.ONE_LEVEL) - .put("user_search.attribute", "cn") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", userSearchBase) + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.scope", LdapSearchScope.ONE_LEVEL) + .put("user_search.pool.enabled", randomBoolean()); + final boolean useAttribute = randomBoolean(); + if (useAttribute) { + builder.put("user_search.attribute", "cn"); + } else { + builder.put("user_search.filter", "(cn={0})"); + } + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -213,7 +260,11 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertNull(session(sessionFactory, user, userPass)); assertNull(unauthenticatedSession(sessionFactory, user)); } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if (useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -221,15 +272,21 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "ou=people,o=sevenSeas"; - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", userSearchBase) - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.scope", LdapSearchScope.ONE_LEVEL) - .put("user_search.attribute", "cn") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", userSearchBase) + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.scope", LdapSearchScope.ONE_LEVEL) + 
.put("user_search.pool.enabled", randomBoolean()); + final boolean useAttribute = randomBoolean(); + if (useAttribute) { + builder.put("user_search.attribute", "cn"); + } else { + builder.put("user_search.filter", "(cn={0})"); + } + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -249,7 +306,11 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertThat(dn, containsString(user)); } } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if (useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -257,14 +318,20 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { String groupSearchBase = "o=sevenSeas"; String userSearchBase = "o=sevenSeas"; - RealmConfig config = new RealmConfig("ldap_realm", Settings.builder() - .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) - .put("user_search.base_dn", userSearchBase) - .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") - .put("bind_password", "pass") - .put("user_search.attribute", "uid1") - .put("user_search.pool.enabled", randomBoolean()) - .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); + Settings.Builder builder = Settings.builder() + .put(buildLdapSettings(ldapUrls(), Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.SUB_TREE)) + .put("user_search.base_dn", userSearchBase) + .put("bind_dn", "cn=Horatio Hornblower,ou=people,o=sevenSeas") + .put("bind_password", "pass") + .put("user_search.pool.enabled", randomBoolean()); + final boolean useAttribute = randomBoolean(); + if (useAttribute) { + builder.put("user_search.attribute", "uid1"); + } else { + builder.put("user_search.filter", "(uid1={0})"); + } + RealmConfig config = new RealmConfig("ldap_realm", builder.build(), globalSettings, new Environment(globalSettings), + new ThreadContext(globalSettings)); LdapUserSearchSessionFactory sessionFactory = new LdapUserSearchSessionFactory(config, sslService); @@ -275,7 +342,11 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertNull(session(sessionFactory, user, userPass)); assertNull(unauthenticatedSession(sessionFactory, user)); } finally { - sessionFactory.shutdown(); + sessionFactory.close(); + } + + if (useAttribute) { + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } @@ -309,7 +380,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { assertThat(dn, containsString("William Bush")); } } finally { - sessionFactory.shutdown(); + sessionFactory.close(); } } @@ -363,7 +434,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { containsString("Philanthropists"))); } } finally { - sessionFactory.shutdown(); + sessionFactory.close(); } } @@ -407,7 +478,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { } } } finally { - sessionFactory.shutdown(); + sessionFactory.close(); } } @@ -422,7 +493,9 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); LDAPConnectionPool connectionPool = LdapUserSearchSessionFactory.createConnectionPool(config, new 
SingleServerSet("localhost", - randomFrom(ldapServers).getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE); + randomFrom(ldapServers).getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE, + () -> new SimpleBindRequest("cn=Horatio Hornblower,ou=people,o=sevenSeas", "pass"), + () -> "cn=Horatio Hornblower,ou=people,o=sevenSeas"); try { assertThat(connectionPool.getCurrentAvailableConnections(), is(LdapUserSearchSessionFactory.DEFAULT_CONNECTION_POOL_INITIAL_SIZE)); @@ -451,7 +524,9 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { .build(), globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings)); LDAPConnectionPool connectionPool = LdapUserSearchSessionFactory.createConnectionPool(config, new SingleServerSet("localhost", - randomFrom(ldapServers).getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE); + randomFrom(ldapServers).getListenPort()), TimeValue.timeValueSeconds(5), NoOpLogger.INSTANCE, + () -> new SimpleBindRequest("cn=Horatio Hornblower,ou=people,o=sevenSeas", "pass"), + () -> "cn=Horatio Hornblower,ou=people,o=sevenSeas"); try { assertThat(connectionPool.getCurrentAvailableConnections(), is(10)); assertThat(connectionPool.getMaximumAvailableConnections(), is(12)); @@ -476,7 +551,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { searchSessionFactory = new LdapUserSearchSessionFactory(config, sslService); } finally { if (searchSessionFactory != null) { - searchSessionFactory.shutdown(); + searchSessionFactory.close(); } } } @@ -499,7 +574,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { future.get(); } finally { if (searchSessionFactory != null) { - searchSessionFactory.shutdown(); + searchSessionFactory.close(); } } } @@ -548,8 +623,10 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { searchSessionFactory = new LdapUserSearchSessionFactory(config, sslService); } finally { if (searchSessionFactory != null) { - searchSessionFactory.shutdown(); + searchSessionFactory.close(); } } + + assertSettingDeprecationsAndWarnings(new Setting[] { LdapUserSearchSessionFactory.SEARCH_ATTRIBUTE }); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java index 3e884132d61..c1e5e136739 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverInMemoryTests.java @@ -21,11 +21,14 @@ import org.elasticsearch.xpack.security.authc.ldap.support.LdapTestCase; import org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils; import org.junit.After; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; public class SearchGroupsResolverInMemoryTests extends LdapTestCase { + private static final String WILLIAM_BUSH = "cn=William Bush,ou=people,o=sevenSeas"; private LDAPConnection connection; @After @@ -53,11 +56,7 @@ public class SearchGroupsResolverInMemoryTests extends LdapTestCase { .build(); final SearchGroupsResolver resolver = new SearchGroupsResolver(settings); final PlainActionFuture> future = new PlainActionFuture<>(); - resolver.resolve(connection, - "cn=William 
Bush,ou=people,o=sevenSeas", - TimeValue.timeValueSeconds(30), - logger, - null, future); + resolver.resolve(connection, WILLIAM_BUSH, TimeValue.timeValueSeconds(30), logger, null, future); final ExecutionException exception = expectThrows(ExecutionException.class, future::get); final Throwable cause = exception.getCause(); @@ -65,6 +64,53 @@ public class SearchGroupsResolverInMemoryTests extends LdapTestCase { assertThat(((LDAPException) cause).getResultCode(), is(ResultCode.TIMEOUT)); } + /** + * Tests searching for groups when the "user_attribute" field is not set + */ + public void testResolveWithDefaultUserAttribute() throws Exception { + connect(new LDAPConnectionOptions()); + + Settings settings = Settings.builder() + .put("group_search.base_dn", "ou=groups,o=sevenSeas") + .put("group_search.scope", LdapSearchScope.SUB_TREE) + .build(); + + final List groups = resolveGroups(settings, WILLIAM_BUSH); + assertThat(groups, iterableWithSize(1)); + assertThat(groups.get(0), containsString("HMS Lydia")); + } + + /** + * Tests searching for groups when the "user_attribute" field is set to "dn" (which is special) + */ + public void testResolveWithExplicitDnAttribute() throws Exception { + connect(new LDAPConnectionOptions()); + + Settings settings = Settings.builder() + .put("group_search.base_dn", "ou=groups,o=sevenSeas") + .put("group_search.user_attribute", "dn") + .build(); + + final List groups = resolveGroups(settings, WILLIAM_BUSH); + assertThat(groups, iterableWithSize(1)); + assertThat(groups.get(0), containsString("HMS Lydia")); + } + + /** + * Tests searching for groups when the "user_attribute" field is set to a missing value + */ + public void testResolveWithMissingAttribute() throws Exception { + connect(new LDAPConnectionOptions()); + + Settings settings = Settings.builder() + .put("group_search.base_dn", "ou=groups,o=sevenSeas") + .put("group_search.user_attribute", "no-such-attribute") + .build(); + + final List groups = resolveGroups(settings, WILLIAM_BUSH); + assertThat(groups, iterableWithSize(0)); + } + private void connect(LDAPConnectionOptions options) throws LDAPException { if (connection != null) { throw new IllegalStateException("Already connected (" + connection.getConnectionName() + ' ' @@ -74,4 +120,11 @@ public class SearchGroupsResolverInMemoryTests extends LdapTestCase { this.connection = LdapUtils.privilegedConnect(() -> new LDAPConnection(options, ldapurl.getHost(), ldapurl.getPort())); } -} \ No newline at end of file + private List resolveGroups(Settings settings, String userDn) { + final SearchGroupsResolver resolver = new SearchGroupsResolver(settings); + final PlainActionFuture> future = new PlainActionFuture<>(); + resolver.resolve(connection, userDn, TimeValue.timeValueSeconds(30), logger, null, future); + return future.actionGet(); + } + +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java index 7cbfeac3933..04e09870e9f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapTestCase.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.security.authc.RealmConfig; -import 
org.elasticsearch.xpack.security.authc.ldap.LdapRealm; import org.elasticsearch.xpack.security.authc.ldap.LdapSessionFactory; import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; import org.elasticsearch.test.ESTestCase; @@ -138,7 +137,7 @@ public abstract class LdapTestCase extends ESTestCase { Settings global = Settings.builder().put("path.home", createTempDir()).build(); RealmConfig config = new RealmConfig("ldap1", settings, global, new ThreadContext(Settings.EMPTY)); - return new DnRoleMapper(LdapRealm.LDAP_TYPE, config, resourceWatcherService); + return new DnRoleMapper(config, resourceWatcherService); } protected LdapSession session(SessionFactory factory, String username, SecureString password) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java index 86403e49b3e..a68f0806bc0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; @@ -104,9 +104,11 @@ public class PkiRealmTests extends ESTestCase { return null; }).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), any(ActionListener.class)); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(token, future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(token, future); + final AuthenticationResult result = future.actionGet(); + assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = result.getUser(); assertThat(user, is(notNullValue())); assertThat(user.principal(), is("Elasticsearch Test Node")); assertThat(user.roles(), is(notNullValue())); @@ -128,9 +130,9 @@ public class PkiRealmTests extends ESTestCase { threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, new X509Certificate[] { certificate }); X509AuthenticationToken token = realm.token(threadContext); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(token, future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(token, future); + User user = future.actionGet().getUser(); assertThat(user, is(notNullValue())); assertThat(user.principal(), is("elasticsearch")); assertThat(user.roles(), is(notNullValue())); @@ -159,9 +161,9 @@ public class PkiRealmTests extends ESTestCase { threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, new X509Certificate[] { certificate }); X509AuthenticationToken token = realm.token(threadContext); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(token, future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(token, future); + User 
user = future.actionGet().getUser(); assertThat(user, is(notNullValue())); assertThat(user.principal(), is("Elasticsearch Test Node")); assertThat(user.roles(), is(notNullValue())); @@ -190,9 +192,9 @@ public class PkiRealmTests extends ESTestCase { threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, new X509Certificate[] { certificate }); X509AuthenticationToken token = realm.token(threadContext); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(token, future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(token, future); + User user = future.actionGet().getUser(); assertThat(user, is(nullValue())); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java index 43c2565532e..27b57acd3a5 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/CachingUsernamePasswordRealmTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.xpack.security.authc.IncomingRequest; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.Realm; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.user.User; @@ -58,8 +58,8 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { RealmConfig config = new RealmConfig("test_realm", settings, globalSettings, new ThreadContext(Settings.EMPTY)); CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config) { @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { - listener.onResponse(new User("username", new String[]{"r1", "r2", "r3"})); + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { + listener.onResponse(AuthenticationResult.success(new User("username", new String[]{"r1", "r2", "r3"}))); } @Override @@ -74,26 +74,26 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { public void testAuthCache() { AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); SecureString pass = new SecureString("pass"); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("a", pass), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("a", pass), future); future.actionGet(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("b", pass), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken("b", pass), future); future.actionGet(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("c", pass), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken("c", pass), future); future.actionGet(); assertThat(realm.authInvocationCounter.intValue(), is(3)); future = new PlainActionFuture<>(); - 
realm.authenticate(new UsernamePasswordToken("a", pass), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken("a", pass), future); future.actionGet(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("b", pass), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken("b", pass), future); future.actionGet(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("c", pass), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken("c", pass), future); future.actionGet(); assertThat(realm.authInvocationCounter.intValue(), is(3)); @@ -130,33 +130,37 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { public void testLookupAndAuthCache() { AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); // lookup first - PlainActionFuture future = new PlainActionFuture<>(); - realm.lookupUser("a", future); - User lookedUp = future.actionGet(); + PlainActionFuture lookupFuture = new PlainActionFuture<>(); + realm.lookupUser("a", lookupFuture); + User lookedUp = lookupFuture.actionGet(); assertThat(realm.lookupInvocationCounter.intValue(), is(1)); assertThat(realm.authInvocationCounter.intValue(), is(0)); assertThat(lookedUp.roles(), arrayContaining("lookupRole1", "lookupRole2")); // now authenticate - future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("a", new SecureString("pass")), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture authFuture = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("a", new SecureString("pass")), authFuture); + AuthenticationResult authResult = authFuture.actionGet(); + assertThat(authResult.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + User user = authResult.getUser(); assertThat(realm.lookupInvocationCounter.intValue(), is(1)); assertThat(realm.authInvocationCounter.intValue(), is(1)); assertThat(user.roles(), arrayContaining("testRole1", "testRole2")); assertThat(user, not(sameInstance(lookedUp))); // authenticate a different user first - future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("b", new SecureString("pass")), future, mock(IncomingRequest.class)); - user = future.actionGet(); + authFuture = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("b", new SecureString("pass")), authFuture); + authResult = authFuture.actionGet(); + assertThat(authResult.getStatus(), is(AuthenticationResult.Status.SUCCESS)); + user = authResult.getUser(); assertThat(realm.lookupInvocationCounter.intValue(), is(1)); assertThat(realm.authInvocationCounter.intValue(), is(2)); assertThat(user.roles(), arrayContaining("testRole1", "testRole2")); //now lookup b - future = new PlainActionFuture<>(); - realm.lookupUser("b", future); - lookedUp = future.actionGet(); + lookupFuture = new PlainActionFuture<>(); + realm.lookupUser("b", lookupFuture); + lookedUp = lookupFuture.actionGet(); assertThat(realm.lookupInvocationCounter.intValue(), is(1)); assertThat(realm.authInvocationCounter.intValue(), is(2)); assertThat(user, sameInstance(lookedUp)); @@ -169,20 +173,20 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { SecureString pass1 = new SecureString("pass"); SecureString pass2 = new SecureString("password"); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new 
UsernamePasswordToken(user, pass1), future, mock(IncomingRequest.class)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken(user, pass1), future); future.actionGet(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken(user, pass1), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken(user, pass1), future); future.actionGet(); assertThat(realm.authInvocationCounter.intValue(), is(1)); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken(user, pass2), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken(user, pass2), future); future.actionGet(); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken(user, pass2), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken(user, pass2), future); future.actionGet(); assertThat(realm.authInvocationCounter.intValue(), is(2)); @@ -195,24 +199,24 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { String user = "testUser"; SecureString password = new SecureString("password"); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken(user, password), future, mock(IncomingRequest.class)); - assertThat(future.actionGet().enabled(), equalTo(false)); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken(user, password), future); + assertThat(future.actionGet().getUser().enabled(), equalTo(false)); assertThat(realm.authInvocationCounter.intValue(), is(1)); realm.setUsersEnabled(true); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken(user, password), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken(user, password), future); future.actionGet(); - assertThat(future.actionGet().enabled(), equalTo(true)); + assertThat(future.actionGet().getUser().enabled(), equalTo(true)); assertThat(realm.authInvocationCounter.intValue(), is(2)); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken(user, password), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken(user, password), future); future.actionGet(); - assertThat(future.actionGet().enabled(), equalTo(true)); + assertThat(future.actionGet().getUser().enabled(), equalTo(true)); assertThat(realm.authInvocationCounter.intValue(), is(2)); } @@ -228,9 +232,9 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { final UsernamePasswordToken authToken = new UsernamePasswordToken("the-user", new SecureString("the-password")); // authenticate - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(authToken, future, mock(IncomingRequest.class)); - final User user1 = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(authToken, future); + final User user1 = future.actionGet().getUser(); assertThat(user1.roles(), arrayContaining("testRole1", "testRole2")); assertThat(realm.authInvocationCounter.intValue(), is(1)); @@ -238,8 +242,8 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { // authenticate future = new PlainActionFuture<>(); - realm.authenticate(authToken, future, mock(IncomingRequest.class)); - final User user2 = future.actionGet(); + realm.authenticate(authToken, future); + final User user2 = future.actionGet().getUser(); 
assertThat(user2.roles(), arrayContaining("testRole1", "testRole2")); assertThat(user2, not(sameInstance(user1))); assertThat(realm.authInvocationCounter.intValue(), is(2)); @@ -254,31 +258,31 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config); final UsernamePasswordToken authToken = new UsernamePasswordToken("the-user", new SecureString("the-password")); - PlainActionFuture future = new PlainActionFuture<>(); + PlainActionFuture future = new PlainActionFuture<>(); // authenticate - realm.authenticate(authToken, future, mock(IncomingRequest.class)); + realm.authenticate(authToken, future); final long start = System.currentTimeMillis(); - final User user1 = future.actionGet(); + final User user1 = future.actionGet().getUser(); assertThat(realm.authInvocationCounter.intValue(), is(1)); // After 100 ms (from the original start time), authenticate (read from cache). We don't care about the result sleepUntil(start + 100); future = new PlainActionFuture<>(); - realm.authenticate(authToken, future, mock(IncomingRequest.class)); + realm.authenticate(authToken, future); future.actionGet(); // After 200 ms (from the original start time), authenticate (read from cache). We don't care about the result sleepUntil(start + 200); future = new PlainActionFuture<>(); - realm.authenticate(authToken, future, mock(IncomingRequest.class)); + realm.authenticate(authToken, future); future.actionGet(); // After 300 ms (from the original start time), authenticate again. The cache entry should have expired (despite the previous reads) sleepUntil(start + 300); future = new PlainActionFuture<>(); - realm.authenticate(authToken, future, mock(IncomingRequest.class)); - final User user2 = future.actionGet(); + realm.authenticate(authToken, future); + final User user2 = future.actionGet().getUser(); assertThat(user2, not(sameInstance(user1))); // Due to slow VMs etc, the cache might have expired more than once during the test, but we can accept that. // We have other tests that verify caching works - this test just checks that it expires even when there are repeated reads. 
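Taken together, the CachingUsernamePasswordRealmTests hunks above replace the old three-argument realm.authenticate(token, listener, IncomingRequest) call, whose listener completed with a bare User (or null on failure), with a two-argument call whose listener completes with an AuthenticationResult that the test then unwraps. The sketch below shows that idiom in isolation; the generic type parameters are assumptions inferred from the surrounding getUser()/getStatus() calls (the rendered patch drops angle-bracketed generics), and the AlwaysAuthenticateCachingRealm fixture is the one already defined in this test class, so treat this as illustrative rather than a verbatim excerpt of the patch.

    public void testAuthenticateCompletesWithAuthenticationResult() throws Exception {
        AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings);

        // the listener type moves from ActionListener<User> to ActionListener<AuthenticationResult> (assumed generics)
        PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
        realm.authenticate(new UsernamePasswordToken("a", new SecureString("pass")), future);

        // success or failure is now an explicit status rather than a null-vs-non-null User
        AuthenticationResult result = future.actionGet();
        assertThat(result.getStatus(), is(AuthenticationResult.Status.SUCCESS));

        // the authenticated user is unwrapped from the result
        User user = result.getUser();
        assertThat(user, is(notNullValue()));
        assertThat(user.roles(), arrayContaining("testRole1", "testRole2"));
    }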
@@ -294,14 +298,14 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { public void testAuthenticateContract() throws Exception { Realm realm = new FailingAuthenticationRealm(Settings.EMPTY, globalSettings); - PlainActionFuture future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future, mock(IncomingRequest.class)); - User user = future.actionGet(); + PlainActionFuture future = new PlainActionFuture<>(); + realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future); + User user = future.actionGet().getUser(); assertThat(user, nullValue()); realm = new ThrowingAuthenticationRealm(Settings.EMPTY, globalSettings); future = new PlainActionFuture<>(); - realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future, mock(IncomingRequest.class)); + realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future); RuntimeException e = expectThrows(RuntimeException.class, future::actionGet); assertThat(e.getMessage(), containsString("whatever exception")); } @@ -329,12 +333,12 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, new ThreadContext(Settings.EMPTY)); final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config) { @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { // do something slow if (BCrypt.checkpw(token.credentials(), passwordHash)) { - listener.onResponse(new User(username, new String[]{"r1", "r2", "r3"})); + listener.onResponse(AuthenticationResult.success(new User(username, new String[]{"r1", "r2", "r3"}))); } else { - listener.onResponse(null); + listener.onResponse(AuthenticationResult.unsuccessful("Incorrect password", null)); } } @@ -359,16 +363,16 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { for (int i = 0; i < numberOfIterations; i++) { UsernamePasswordToken token = new UsernamePasswordToken(username, invalidPassword ? 
randomPassword : password); - realm.authenticate(token, ActionListener.wrap((user) -> { - if (invalidPassword && user != null) { - throw new RuntimeException("invalid password led to an authenticated user: " + user.toString()); - } else if (invalidPassword == false && user == null) { - throw new RuntimeException("proper password led to a null user!"); + realm.authenticate(token, ActionListener.wrap((result) -> { + if (invalidPassword && result.isAuthenticated()) { + throw new RuntimeException("invalid password led to an authenticated user: " + result); + } else if (invalidPassword == false && result.isAuthenticated() == false) { + throw new RuntimeException("proper password led to an unauthenticated result: " + result); } }, (e) -> { logger.error("caught exception", e); - fail("unexpected exception"); - }), mock(IncomingRequest.class)); + fail("unexpected exception - " + e); + })); } } catch (InterruptedException e) { @@ -392,7 +396,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, new ThreadContext(Settings.EMPTY)); final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config) { @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { listener.onFailure(new UnsupportedOperationException("authenticate should not be called!")); } @@ -446,8 +450,8 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { - listener.onResponse(null); + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { + listener.onResponse(AuthenticationResult.notHandled()); } @Override @@ -463,7 +467,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { listener.onFailure(new RuntimeException("whatever exception")); } @@ -493,10 +497,10 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { authInvocationCounter.incrementAndGet(); final User user = new User(token.principal(), new String[]{"testRole1", "testRole2"}, null, null, emptyMap(), usersEnabled); - listener.onResponse(user); + listener.onResponse(AuthenticationResult.success(user)); } @Override @@ -516,9 +520,9 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase { } @Override - protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener, IncomingRequest incomingRequest) { + protected void doAuthenticate(UsernamePasswordToken token, ActionListener listener) { authInvocationCounter.incrementAndGet(); - listener.onResponse(new User(token.principal(), new String[]{"testRole1", "testRole2"})); + listener.onResponse(AuthenticationResult.success(new User(token.principal(), new String[]{"testRole1", "testRole2"}))); } @Override diff --git 
a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java index 03eef18dc03..7e385fffbc3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java @@ -5,22 +5,6 @@ */ package org.elasticsearch.xpack.security.authc.support; -import com.unboundid.ldap.sdk.DN; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; -import org.elasticsearch.xpack.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.authc.ldap.LdapRealm; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.TestThreadPool; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.junit.After; -import org.junit.Before; - import java.io.BufferedWriter; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -36,14 +20,33 @@ import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import com.unboundid.ldap.sdk.DN; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.security.audit.logfile.CapturingLogger; +import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.junit.After; +import org.junit.Before; + import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; public class DnRoleMapperTests extends ESTestCase { @@ -186,6 +189,7 @@ public class DnRoleMapperTests extends ESTestCase { public void testAddNullListener() throws Exception { Path file = env.configFile().resolve("test_role_mapping.yml"); + Files.write(file, Collections.singleton("")); ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool); DnRoleMapper mapper = createMapper(file, watcherService); NullPointerException e = expectThrows(NullPointerException.class, () -> mapper.addListener(null)); @@ -195,7 +199,7 @@ public class DnRoleMapperTests extends ESTestCase { public void testParseFile() throws Exception { Path file = getDataPath("role_mapping.yml"); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); - Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", 
"_name"); + Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); assertThat(mappings, notNullValue()); assertThat(mappings.size(), is(3)); @@ -225,7 +229,7 @@ public class DnRoleMapperTests extends ESTestCase { Path file = createTempDir().resolve("foo.yaml"); Files.createFile(file); Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG); - Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name"); + Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); assertThat(mappings, notNullValue()); assertThat(mappings.isEmpty(), is(true)); List events = CapturingLogger.output(logger.getName(), Level.DEBUG); @@ -236,9 +240,16 @@ public class DnRoleMapperTests extends ESTestCase { public void testParseFile_WhenFileDoesNotExist() throws Exception { Path file = createTempDir().resolve(randomAlphaOfLength(10)); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); - Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name"); + Map> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false); assertThat(mappings, notNullValue()); assertThat(mappings.isEmpty(), is(true)); + + final ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> { + DnRoleMapper.parseFile(file, logger, "_type", "_name", true); + }); + assertThat(exception.getMessage(), containsString(file.toString())); + assertThat(exception.getMessage(), containsString("does not exist")); + assertThat(exception.getMessage(), containsString("_name")); } public void testParseFile_WhenCannotReadFile() throws Exception { @@ -247,7 +258,7 @@ public class DnRoleMapperTests extends ESTestCase { Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16); Logger logger = CapturingLogger.newCapturingLogger(Level.INFO); try { - DnRoleMapper.parseFile(file, logger, "_type", "_name"); + DnRoleMapper.parseFile(file, logger, "_type", "_name", false); fail("expected a parse failure"); } catch (Exception e) { this.logger.info("expected", e); @@ -274,7 +285,7 @@ public class DnRoleMapperTests extends ESTestCase { .build(); RealmConfig config = new RealmConfig("ldap1", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); - DnRoleMapper mapper = new DnRoleMapper(LdapRealm.LDAP_TYPE, config, new ResourceWatcherService(settings, threadPool)); + DnRoleMapper mapper = new DnRoleMapper(config, new ResourceWatcherService(settings, threadPool)); Set roles = mapper.resolveRoles("", Arrays.asList(STARK_GROUP_DNS)); @@ -286,9 +297,9 @@ public class DnRoleMapperTests extends ESTestCase { Settings ldapSettings = Settings.builder() .put(USE_UNMAPPED_GROUPS_AS_ROLES_SETTING_KEY, true) .build(); - RealmConfig config = new RealmConfig("ldap1", ldapSettings, settings, new ThreadContext(Settings.EMPTY));; + RealmConfig config = new RealmConfig("ldap1", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); - DnRoleMapper mapper = new DnRoleMapper(LdapRealm.LDAP_TYPE, config, new ResourceWatcherService(settings, threadPool)); + DnRoleMapper mapper = new DnRoleMapper(config, new ResourceWatcherService(settings, threadPool)); Set roles = mapper.resolveRoles("", Arrays.asList(STARK_GROUP_DNS)); assertThat(roles, hasItems("genius", "billionaire", "playboy", "philanthropist", "shield", "avengers")); @@ -300,9 +311,9 @@ public class DnRoleMapperTests extends ESTestCase { .put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath()) .put(USE_UNMAPPED_GROUPS_AS_ROLES_SETTING_KEY, false) .build(); - RealmConfig config = new 
RealmConfig("ldap-userdn-role", ldapSettings, settings, new ThreadContext(Settings.EMPTY));; + RealmConfig config = new RealmConfig("ldap-userdn-role", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); - DnRoleMapper mapper = new DnRoleMapper(LdapRealm.LDAP_TYPE, config, new ResourceWatcherService(settings, threadPool)); + DnRoleMapper mapper = new DnRoleMapper(config, new ResourceWatcherService(settings, threadPool)); Set roles = mapper.resolveRoles("cn=Horatio Hornblower,ou=people,o=sevenSeas", Collections.emptyList()); assertThat(roles, hasItem("avenger")); @@ -313,6 +324,6 @@ public class DnRoleMapperTests extends ESTestCase { .put("files.role_mapping", file.toAbsolutePath()) .build(); RealmConfig config = new RealmConfig("ad-group-mapper-test", realmSettings, settings, env, new ThreadContext(Settings.EMPTY)); - return new DnRoleMapper(randomBoolean() ? LdapRealm.AD_TYPE : LdapRealm.LDAP_TYPE, config, watcherService); + return new DnRoleMapper(config, watcherService); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheckTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheckTests.java new file mode 100644 index 00000000000..5894076039b --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authc/support/RoleMappingFileBootstrapCheckTests.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authc.support; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Collections; + +import org.elasticsearch.bootstrap.BootstrapCheck; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.junit.Before; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class RoleMappingFileBootstrapCheckTests extends ESTestCase { + + private static final String ROLE_MAPPING_FILE_SETTING = DnRoleMapper.ROLE_MAPPING_FILE_SETTING.getKey(); + + protected Settings settings; + + @Before + public void init() throws IOException { + settings = Settings.builder() + .put("resource.reload.interval.high", "100ms") + .put("path.home", createTempDir()) + .build(); + } + + public void testBootstrapCheckOfValidFile() { + Path file = getDataPath("role_mapping.yml"); + Settings ldapSettings = Settings.builder() + .put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath()) + .build(); + RealmConfig config = new RealmConfig("ldap1", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); + final BootstrapCheck check = RoleMappingFileBootstrapCheck.create(config); + assertThat(check, notNullValue()); + assertThat(check.alwaysEnforce(), equalTo(true)); + assertThat(check.check(), equalTo(false)); + } + + public void testBootstrapCheckOfMissingFile() { + final String fileName = randomAlphaOfLength(10); + Path file = createTempDir().resolve(fileName); + Settings ldapSettings = Settings.builder() + .put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath()) + .build(); + 
RealmConfig config = new RealmConfig("the-realm-name", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); + final BootstrapCheck check = RoleMappingFileBootstrapCheck.create(config); + assertThat(check, notNullValue()); + assertThat(check.alwaysEnforce(), equalTo(true)); + assertThat(check.check(), equalTo(true)); + assertThat(check.errorMessage(), containsString("the-realm-name")); + assertThat(check.errorMessage(), containsString(fileName)); + assertThat(check.errorMessage(), containsString("does not exist")); + } + + public void testBootstrapCheckWithInvalidYaml() throws IOException { + Path file = createTempFile("", ".yml"); + // writing in utf_16 should cause a parsing error as we try to read the file in utf_8 + Files.write(file, Collections.singletonList("junk"), StandardCharsets.UTF_16); + + Settings ldapSettings = Settings.builder() + .put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath()) + .build(); + RealmConfig config = new RealmConfig("the-realm-name", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); + final BootstrapCheck check = RoleMappingFileBootstrapCheck.create(config); + assertThat(check, notNullValue()); + assertThat(check.alwaysEnforce(), equalTo(true)); + assertThat(check.check(), equalTo(true)); + assertThat(check.errorMessage(), containsString("the-realm-name")); + assertThat(check.errorMessage(), containsString(file.toString())); + assertThat(check.errorMessage(), containsString("could not read")); + } + + public void testBootstrapCheckWithInvalidDn() throws IOException { + Path file = createTempFile("", ".yml"); + // A DN must have at least 1 '=' symbol + Files.write(file, Collections.singletonList("role: not-a-dn")); + + Settings ldapSettings = Settings.builder() + .put(ROLE_MAPPING_FILE_SETTING, file.toAbsolutePath()) + .build(); + RealmConfig config = new RealmConfig("the-realm-name", ldapSettings, settings, new ThreadContext(Settings.EMPTY)); + final BootstrapCheck check = RoleMappingFileBootstrapCheck.create(config); + assertThat(check, notNullValue()); + assertThat(check.alwaysEnforce(), equalTo(true)); + assertThat(check.check(), equalTo(true)); + assertThat(check.errorMessage(), containsString("the-realm-name")); + assertThat(check.errorMessage(), containsString(file.toString())); + assertThat(check.errorMessage(), containsString("invalid DN")); + assertThat(check.errorMessage(), containsString("not-a-dn")); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 98e38474b43..98b62892ef0 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -349,25 +349,8 @@ public class AuthorizationServiceTests extends ESTestCase { verifyNoMoreInteractions(auditTrail); } - @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1217") - public void testElasticUserOnlyAuthorizedForChangePasswordRequestsInSetupMode() { - final User user = new ElasticUser(true, true); - final ChangePasswordRequest changePasswordrequest = new ChangePasswordRequestBuilder(mock(Client.class)) - .username(user.principal()).request(); - - authorize(createAuthentication(user), ChangePasswordAction.NAME, changePasswordrequest); - - verify(auditTrail).accessGranted(user, ChangePasswordAction.NAME, changePasswordrequest); - - Tuple request = 
randomCompositeRequest(); - assertThrowsAuthorizationException(() -> authorize(createAuthentication(user), request.v1(), request.v2()), - request.v1(), "elastic"); - - verify(auditTrail).accessDenied(user, request.v1(), request.v2()); - } - public void testElasticUserAuthorizedForNonChangePasswordRequestsWhenNotInSetupMode() { - final User user = new ElasticUser(true, false); + final User user = new ElasticUser(true); Tuple request = randomCompositeRequest(); authorize(createAuthentication(user), request.v1(), request.v2()); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractorTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractorTests.java index 92010da72f9..aca79f831d7 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractorTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/FieldExtractorTests.java @@ -12,8 +12,8 @@ import org.apache.lucene.search.AssertingQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DisjunctionMaxQuery; +import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.DocValuesNumbersQuery; -import org.apache.lucene.search.FieldValueQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -103,7 +103,7 @@ public class FieldExtractorTests extends ESTestCase { public void testFieldValue() { Set fields = new HashSet<>(); - FieldExtractor.extractFields(new FieldValueQuery("foo"), fields); + FieldExtractor.extractFields(new DocValuesFieldExistsQuery("foo"), fields); assertEquals(asSet("foo"), fields); } @@ -135,7 +135,7 @@ public class FieldExtractorTests extends ESTestCase { public void testIndexOrDocValuesQuery() { Set fields = new HashSet<>(); Query supported = IntPoint.newExactQuery("foo", 42); - Query unsupported = NumericDocValuesField.newExactQuery("bar", 3); + Query unsupported = NumericDocValuesField.newSlowExactQuery("bar", 3); IndexOrDocValuesQuery query = new IndexOrDocValuesQuery(supported, supported); FieldExtractor.extractFields(query, fields); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 7424db98d03..cafe3fc5269 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.security.authz.store; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; @@ -31,11 +33,13 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.Function; import static 
org.elasticsearch.mock.orig.Mockito.times; import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; +import static org.elasticsearch.xpack.security.test.SecurityTestUtils.getClusterIndexHealth; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; @@ -209,6 +213,9 @@ public class CompositeRolesStoreTests extends ESTestCase { verify(reservedRolesStore, times(2)).roleDescriptors(); } verifyNoMoreInteractions(fileRolesStore, reservedRolesStore, nativeRolesStore); + + // force a cache clear + } public void testCustomRolesProviders() { @@ -430,6 +437,51 @@ public class CompositeRolesStoreTests extends ESTestCase { assertEquals(0, role.indices().groups().length); } + public void testCacheClearOnIndexHealthChange() { + final AtomicInteger numInvalidation = new AtomicInteger(0); + + CompositeRolesStore compositeRolesStore = new CompositeRolesStore( + Settings.EMPTY, mock(FileRolesStore.class), mock(NativeRolesStore.class), mock(ReservedRolesStore.class), + Collections.emptyList(), new ThreadContext(Settings.EMPTY), new XPackLicenseState()) { + @Override + public void invalidateAll() { + numInvalidation.incrementAndGet(); + } + }; + + int expectedInvalidation = 0; + // existing to no longer present + ClusterIndexHealth previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + ClusterIndexHealth currentHealth = null; + compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + assertEquals(++expectedInvalidation, numInvalidation.get()); + + // doesn't exist to exists + previousHealth = null; + currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + assertEquals(++expectedInvalidation, numInvalidation.get()); + + // green or yellow to red + previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentHealth = getClusterIndexHealth(ClusterHealthStatus.RED); + compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + assertEquals(expectedInvalidation, numInvalidation.get()); + + // red to non red + previousHealth = getClusterIndexHealth(ClusterHealthStatus.RED); + currentHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + assertEquals(++expectedInvalidation, numInvalidation.get()); + + // green to yellow or yellow to green + previousHealth = getClusterIndexHealth(randomFrom(ClusterHealthStatus.GREEN, ClusterHealthStatus.YELLOW)); + currentHealth = getClusterIndexHealth( + previousHealth.getStatus() == ClusterHealthStatus.GREEN ? 
ClusterHealthStatus.YELLOW : ClusterHealthStatus.GREEN); + compositeRolesStore.onSecurityIndexHealthChange(previousHealth, currentHealth); + assertEquals(expectedInvalidation, numInvalidation.get()); + } + private static class InMemoryRolesProvider implements BiConsumer, ActionListener>> { private final Function, Set> roleDescriptorsFunc; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java index b597bfac0c9..d6c7d90c167 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/authz/store/ReservedRolesStoreTests.java @@ -302,12 +302,12 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(reportingUserRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); assertThat(reportingUserRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); assertThat(reportingUserRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(UpdateAction.NAME).test(index), is(true)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - assertThat(reportingUserRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(true)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(UpdateAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(false)); + assertThat(reportingUserRole.indices().allowedIndicesMatcher(BulkAction.NAME).test(index), is(false)); } public void testSuperuserRole() { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/bootstrap/BootstrapElasticPasswordTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/bootstrap/BootstrapElasticPasswordTests.java new file mode 100644 index 00000000000..4607b84ebfc --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/bootstrap/BootstrapElasticPasswordTests.java @@ -0,0 +1,337 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.bootstrap; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.XPackSettings; +import org.elasticsearch.xpack.security.SecurityLifecycleService; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; +import org.elasticsearch.xpack.security.authc.support.Hasher; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +@SuppressWarnings("unchecked") +public class BootstrapElasticPasswordTests extends ESTestCase { + + private ClusterService clusterService; + private ReservedRealm realm; + private SecurityLifecycleService lifecycle; + private ArgumentCaptor listenerCaptor; + private ArgumentCaptor actionLister; + + @Before + public void setupBootstrap() { + clusterService = mock(ClusterService.class); + realm = mock(ReservedRealm.class); + lifecycle = mock(SecurityLifecycleService.class); + listenerCaptor = ArgumentCaptor.forClass(ClusterStateListener.class); + actionLister = ArgumentCaptor.forClass(ActionListener.class); + } + + public void testNoListenerAttachedWhenNoBootstrapPassword() { + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(Settings.EMPTY, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verifyZeroInteractions(clusterService); + } + + public void testNoListenerAttachedWhenReservedRealmDisabled() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), randomAlphaOfLength(10)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verifyZeroInteractions(clusterService); + } + + public void testPasswordHasToBeValid() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), randomAlphaOfLength(5)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + expectThrows(ValidationException.class, bootstrap::initiatePasswordBootstrap); + + verifyZeroInteractions(clusterService); + } + + public void testDoesNotBootstrapUntilStateRecovered() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), 
randomAlphaOfLength(10)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + ClusterState state = mock(ClusterState.class); + ClusterBlocks blocks = mock(ClusterBlocks.class); + when(event.state()).thenReturn(state); + when(state.blocks()).thenReturn(blocks); + when(blocks.hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)).thenReturn(true); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + + listener.clusterChanged(event); + + verifyZeroInteractions(realm); + } + + public void testDoesNotBootstrapUntilSecurityIndexUpdated() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), randomAlphaOfLength(10)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(true); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + when(lifecycle.isSecurityIndexExisting()).thenReturn(true); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(true); + + listener.clusterChanged(event); + + verifyZeroInteractions(realm); + } + + public void testDoesNotBootstrapUntilSecurityIndexIfExistingIsAvailable() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), randomAlphaOfLength(10)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + when(lifecycle.isSecurityIndexExisting()).thenReturn(true); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(false); + + listener.clusterChanged(event); + + verifyZeroInteractions(realm); + } + + public void testDoesNotBootstrapUntilSecurityIndexWriteable() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), randomAlphaOfLength(10)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener 
listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(false); + when(lifecycle.isSecurityIndexExisting()).thenReturn(true); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(true); + + listener.clusterChanged(event); + + verifyZeroInteractions(realm); + } + + public void testDoesAllowBootstrapForUnavailableIndexIfNotExisting() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), randomAlphaOfLength(10)); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + when(lifecycle.isSecurityIndexExisting()).thenReturn(false); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(false); + + listener.clusterChanged(event); + + verify(realm).bootstrapElasticUserCredentials(any(SecureString.class), any(ActionListener.class)); + } + + public void testDoesNotBootstrapBeginsWhenRecoveryDoneAndIndexReady() { + String password = randomAlphaOfLength(10); + ensureBootstrapStarted(password); + + ArgumentCaptor hashedPasswordCaptor = ArgumentCaptor.forClass(SecureString.class); + verify(realm).bootstrapElasticUserCredentials(hashedPasswordCaptor.capture(), any(ActionListener.class)); + assertTrue(Hasher.BCRYPT.verify(new SecureString(password.toCharArray()), hashedPasswordCaptor.getValue().getChars())); + } + + public void testWillNotAllowTwoConcurrentBootstrapAttempts() { + String password = randomAlphaOfLength(10); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), password); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + when(lifecycle.isSecurityIndexExisting()).thenReturn(true); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(true); + + listener.clusterChanged(event); + listener.clusterChanged(event); + + verify(realm, times(1)).bootstrapElasticUserCredentials(any(SecureString.class), any(ActionListener.class)); + } + + public void testWillNotAllowSecondBootstrapAttempt() { + String password = randomAlphaOfLength(10); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), password); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, 
logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + when(lifecycle.isSecurityIndexExisting()).thenReturn(true); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(true); + + listener.clusterChanged(event); + + verify(realm, times(1)).bootstrapElasticUserCredentials(any(SecureString.class), actionLister.capture()); + + actionLister.getValue().onResponse(true); + + listener.clusterChanged(event); + + verify(realm, times(1)).bootstrapElasticUserCredentials(any(SecureString.class), any()); + } + + public void testBootstrapCompleteRemovesListener() { + String password = randomAlphaOfLength(10); + ensureBootstrapStarted(password); + + verify(realm).bootstrapElasticUserCredentials(any(SecureString.class), actionLister.capture()); + + actionLister.getValue().onResponse(randomBoolean()); + + verify(clusterService).removeListener(listenerCaptor.getValue()); + } + + public void testBootstrapFailedRemovesListener() { + String password = randomAlphaOfLength(10); + ensureBootstrapStarted(password); + + verify(realm).bootstrapElasticUserCredentials(any(SecureString.class), actionLister.capture()); + + actionLister.getValue().onFailure(new RuntimeException("failed")); + + verify(clusterService).removeListener(listenerCaptor.getValue()); + } + + private void ensureBootstrapStarted(String password) { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString(ReservedRealm.BOOTSTRAP_ELASTIC_PASSWORD.getKey(), password); + Settings settings = Settings.builder() + .setSecureSettings(secureSettings) + .build(); + BootstrapElasticPassword bootstrap = new BootstrapElasticPassword(settings, logger, clusterService, realm, lifecycle); + + bootstrap.initiatePasswordBootstrap(); + + verify(clusterService).addListener(listenerCaptor.capture()); + + ClusterStateListener listener = listenerCaptor.getValue(); + + ClusterChangedEvent event = getStateRecoveredEvent(); + when(lifecycle.isSecurityIndexOutOfDate()).thenReturn(false); + when(lifecycle.isSecurityIndexWriteable()).thenReturn(true); + when(lifecycle.isSecurityIndexExisting()).thenReturn(true); + when(lifecycle.isSecurityIndexAvailable()).thenReturn(true); + + listener.clusterChanged(event); + } + + private ClusterChangedEvent getStateRecoveredEvent() { + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + ClusterState state = mock(ClusterState.class); + ClusterBlocks blocks = mock(ClusterBlocks.class); + when(event.state()).thenReturn(state); + when(state.blocks()).thenReturn(blocks); + when(blocks.hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)).thenReturn(false); + return event; + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/support/AutomatonsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/support/AutomatonsTests.java index ea1b32fc838..efa7e39e2af 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/support/AutomatonsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/support/AutomatonsTests.java @@ -12,7 +12,9 @@ import org.elasticsearch.test.ESTestCase; import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES; import static 
org.elasticsearch.xpack.security.support.Automatons.pattern; import static org.elasticsearch.xpack.security.support.Automatons.patterns; +import static org.elasticsearch.xpack.security.support.Automatons.predicate; import static org.elasticsearch.xpack.security.support.Automatons.wildcard; +import static org.hamcrest.Matchers.equalTo; public class AutomatonsTests extends ESTestCase { public void testPatternsUnionOfMultiplePatterns() throws Exception { @@ -53,6 +55,12 @@ public class AutomatonsTests extends ESTestCase { assertMatch(wildcard("t\\*st"), "t*st"); } + public void testPredicateToString() throws Exception { + assertThat(predicate("a.*z").toString(), equalTo("a.*z")); + assertThat(predicate("a.*z", "A.*Z").toString(), equalTo("a.*z|A.*Z")); + assertThat(predicate("a.*z", "A.*Z", "Α.*Ω").toString(), equalTo("a.*z|A.*Z|Α.*Ω")); + } + private void assertMatch(Automaton automaton, String text) { CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_MAX_DETERMINIZED_STATES); assertTrue(runAutomaton.run(text)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java index 18f2b27b986..2ebfa208411 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/support/IndexLifecycleManagerTests.java @@ -10,7 +10,10 @@ import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import org.elasticsearch.Version; import org.elasticsearch.action.Action; @@ -28,15 +31,25 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.security.InternalClient; @@ -46,11 +59,11 @@ import org.hamcrest.Matchers; import org.junit.Before; import org.mockito.Mockito; +import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static 
org.elasticsearch.xpack.security.support.IndexLifecycleManager.NULL_MIGRATOR; import static org.elasticsearch.xpack.security.support.IndexLifecycleManager.TEMPLATE_VERSION_PATTERN; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Matchers.any; -import static org.mockito.Matchers.notNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -108,6 +121,17 @@ public class IndexLifecycleManagerTests extends ESTestCase { assertInitialState(); final ClusterState.Builder clusterStateBuilder = createClusterState(INDEX_NAME, TEMPLATE_NAME); + Index index = new Index(INDEX_NAME, UUID.randomUUID().toString()); + ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, EXISTING_STORE_INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); + String nodeId = ESTestCase.randomAlphaOfLength(8); + IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(new ShardId(index, 0)) + .addShard(shardRouting.initialize(nodeId, null, shardRouting.getExpectedShardSize()) + .moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, ""))) + .build(); + clusterStateBuilder.routingTable(RoutingTable.builder() + .add(IndexRoutingTable.builder(index).addIndexShard(table).build()) + .build()); manager.clusterChanged(event(clusterStateBuilder)); assertIndexUpToDateButNotAvailable(); @@ -220,6 +244,87 @@ public class IndexLifecycleManagerTests extends ESTestCase { assertCompleteState(true); } + public void testIndexHealthChangeListeners() throws Exception { + final AtomicBoolean listenerCalled = new AtomicBoolean(false); + final AtomicReference previousHealth = new AtomicReference<>(); + final AtomicReference currentHealth = new AtomicReference<>(); + final BiConsumer listener = (prevState, state) -> { + previousHealth.set(prevState); + currentHealth.set(state); + listenerCalled.set(true); + }; + + if (randomBoolean()) { + if (randomBoolean()) { + manager.addIndexHealthChangeListener(listener); + manager.addIndexHealthChangeListener((prevState, state) -> { + throw new RuntimeException("throw after listener"); + }); + } else { + manager.addIndexHealthChangeListener((prevState, state) -> { + throw new RuntimeException("throw before listener"); + }); + manager.addIndexHealthChangeListener(listener); + } + } else { + manager.addIndexHealthChangeListener(listener); + } + + // index doesn't exist and now exists + final ClusterState.Builder clusterStateBuilder = createClusterState(INDEX_NAME, TEMPLATE_NAME); + markShardsAvailable(clusterStateBuilder); + manager.clusterChanged(event(clusterStateBuilder)); + + assertTrue(listenerCalled.get()); + assertNull(previousHealth.get()); + assertEquals(ClusterHealthStatus.GREEN, currentHealth.get().getStatus()); + + // reset and call with no change to the index + listenerCalled.set(false); + previousHealth.set(null); + currentHealth.set(null); + ClusterChangedEvent event = new ClusterChangedEvent("same index health", clusterStateBuilder.build(), clusterStateBuilder.build()); + manager.clusterChanged(event); + + assertFalse(listenerCalled.get()); + assertNull(previousHealth.get()); + assertNull(currentHealth.get()); + + // index with different health + listenerCalled.set(false); + previousHealth.set(null); + currentHealth.set(null); + ClusterState previousState = clusterStateBuilder.build(); + Index prevIndex = previousState.getRoutingTable().index(INDEX_NAME).getIndex(); + clusterStateBuilder.routingTable(RoutingTable.builder() + 
.add(IndexRoutingTable.builder(prevIndex) + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(prevIndex, 0)) + .addShard(ShardRouting.newUnassigned(new ShardId(prevIndex, 0), true, EXISTING_STORE_INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(UUIDs.randomBase64UUID(random()), null, 0L) + .moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, ""))) + .build())) + .build()); + + + + event = new ClusterChangedEvent("different index health", clusterStateBuilder.build(), previousState); + manager.clusterChanged(event); + assertTrue(listenerCalled.get()); + assertEquals(ClusterHealthStatus.GREEN, previousHealth.get().getStatus()); + assertEquals(ClusterHealthStatus.RED, currentHealth.get().getStatus()); + + // swap prev and current + listenerCalled.set(false); + previousHealth.set(null); + currentHealth.set(null); + event = new ClusterChangedEvent("different index health swapped", previousState, clusterStateBuilder.build()); + manager.clusterChanged(event); + assertTrue(listenerCalled.get()); + assertEquals(ClusterHealthStatus.RED, previousHealth.get().getStatus()); + assertEquals(ClusterHealthStatus.GREEN, currentHealth.get().getStatus()); + } + private void assertInitialState() { assertThat(manager.indexExists(), Matchers.equalTo(false)); assertThat(manager.isAvailable(), Matchers.equalTo(false)); @@ -368,5 +473,4 @@ public class IndexLifecycleManagerTests extends ESTestCase { final String resource = "/" + templateName + ".json"; return TemplateUtils.loadTemplate(resource, Version.CURRENT.toString(), TEMPLATE_VERSION_PATTERN); } - } \ No newline at end of file diff --git a/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java b/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java index 115cde86a7f..0b109874061 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java @@ -6,16 +6,21 @@ package org.elasticsearch.xpack.security.test; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -29,6 +34,7 @@ import java.util.UUID; import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_INDEX_NAME; +import static org.junit.Assert.assertEquals; public class SecurityTestUtils { @@ -87,4 +93,59 @@ public class SecurityTestUtils { return metaDataBuilder.build(); } + public static ClusterIndexHealth 
getClusterIndexHealth(ClusterHealthStatus status) { + IndexMetaData metaData = IndexMetaData.builder("foo").settings(Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .build()) + .build(); + final IndexRoutingTable routingTable; + switch (status) { + case RED: + routingTable = IndexRoutingTable.builder(metaData.getIndex()) + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(metaData.getIndex(), 0)) + .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), true, EXISTING_STORE_INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L)) + .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), false, + RecoverySource.PeerRecoverySource.INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L)) + .build()) + .build(); + break; + case YELLOW: + routingTable = IndexRoutingTable.builder(metaData.getIndex()) + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(metaData.getIndex(), 0)) + .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), true, EXISTING_STORE_INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L).moveToStarted()) + .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), false, + RecoverySource.PeerRecoverySource.INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L)) + .build()) + .build(); + break; + case GREEN: + routingTable = IndexRoutingTable.builder(metaData.getIndex()) + .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(metaData.getIndex(), 0)) + .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), true, EXISTING_STORE_INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L).moveToStarted()) + .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), false, + RecoverySource.PeerRecoverySource.INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) + .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L).moveToStarted()) + .build()) + .build(); + break; + default: + throw new IllegalStateException("unknown status: " + status); + } + ClusterIndexHealth health = new ClusterIndexHealth(metaData, routingTable); + assertEquals(status, health.getStatus()); + return health; + } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java index 2fc6755fdf5..14dd78f902f 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/ssl/CertificateToolTests.java @@ -5,35 +5,6 @@ */ package org.elasticsearch.xpack.ssl; -import com.google.common.jimfs.Configuration; -import com.google.common.jimfs.Jimfs; -import org.apache.lucene.util.IOUtils; -import org.bouncycastle.asn1.ASN1String; -import org.bouncycastle.asn1.DEROctetString; -import org.bouncycastle.asn1.pkcs.Attribute; -import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; -import org.bouncycastle.asn1.x509.Extension; -import org.bouncycastle.asn1.x509.Extensions; -import 
org.bouncycastle.asn1.x509.GeneralName; -import org.bouncycastle.asn1.x509.GeneralNames; -import org.bouncycastle.cert.X509CertificateHolder; -import org.bouncycastle.openssl.PEMEncryptedKeyPair; -import org.bouncycastle.openssl.PEMParser; -import org.bouncycastle.pkcs.PKCS10CertificationRequest; -import org.elasticsearch.cli.MockTerminal; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.SecuritySettingsSource; -import org.elasticsearch.xpack.ssl.CertificateTool.CAInfo; -import org.elasticsearch.xpack.ssl.CertificateTool.CertificateInformation; -import org.elasticsearch.xpack.ssl.CertificateTool.Name; -import org.junit.After; - import javax.security.auth.x500.X500Principal; import java.io.IOException; import java.io.Reader; @@ -64,8 +35,44 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import com.google.common.jimfs.Configuration; +import com.google.common.jimfs.Jimfs; +import org.apache.lucene.util.IOUtils; +import org.bouncycastle.asn1.ASN1ObjectIdentifier; +import org.bouncycastle.asn1.ASN1Sequence; +import org.bouncycastle.asn1.ASN1String; +import org.bouncycastle.asn1.BERTags; +import org.bouncycastle.asn1.DEROctetString; +import org.bouncycastle.asn1.DERSequence; +import org.bouncycastle.asn1.pkcs.Attribute; +import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; +import org.bouncycastle.asn1.x509.Extension; +import org.bouncycastle.asn1.x509.Extensions; +import org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.asn1.x509.GeneralNames; +import org.bouncycastle.cert.X509CertificateHolder; +import org.bouncycastle.openssl.PEMEncryptedKeyPair; +import org.bouncycastle.openssl.PEMParser; +import org.bouncycastle.pkcs.PKCS10CertificationRequest; +import org.elasticsearch.cli.MockTerminal; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.SecuritySettingsSource; +import org.elasticsearch.xpack.ssl.CertificateTool.CAInfo; +import org.elasticsearch.xpack.ssl.CertificateTool.CertificateInformation; +import org.elasticsearch.xpack.ssl.CertificateTool.Name; +import org.hamcrest.Matchers; +import org.junit.After; + import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.startsWith; /** * Unit tests for the tool used to simplify SSL certificate generation @@ -179,21 +186,25 @@ public class CertificateToolTests extends ESTestCase { CertificateInformation certInfo = certInfosMap.get("node1"); assertEquals(Collections.singletonList("127.0.0.1"), certInfo.ipAddresses); assertEquals(Collections.singletonList("localhost"), certInfo.dnsNames); + assertEquals(Collections.emptyList(), certInfo.commonNames); assertEquals("node1", certInfo.name.filename); certInfo = certInfosMap.get("node2"); assertEquals(Collections.singletonList("::1"), certInfo.ipAddresses); 
assertEquals(Collections.emptyList(), certInfo.dnsNames); + assertEquals(Collections.singletonList("node2.elasticsearch"), certInfo.commonNames); assertEquals("node2", certInfo.name.filename); certInfo = certInfosMap.get("node3"); assertEquals(Collections.emptyList(), certInfo.ipAddresses); assertEquals(Collections.emptyList(), certInfo.dnsNames); + assertEquals(Collections.emptyList(), certInfo.commonNames); assertEquals("node3", certInfo.name.filename); certInfo = certInfosMap.get("CN=different value"); assertEquals(Collections.emptyList(), certInfo.ipAddresses); assertEquals(Collections.singletonList("node4.mydomain.com"), certInfo.dnsNames); + assertEquals(Collections.emptyList(), certInfo.commonNames); assertEquals("different file", certInfo.name.filename); } @@ -307,7 +318,7 @@ public class CertificateToolTests extends ESTestCase { try (Reader reader = Files.newBufferedReader(cert)) { X509Certificate certificate = readX509Certificate(reader); assertEquals(certInfo.name.x500Principal.toString(), certificate.getSubjectX500Principal().getName()); - final int sanCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size(); + final int sanCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size() + certInfo.commonNames.size(); if (sanCount == 0) { assertNull(certificate.getSubjectAlternativeNames()); } else { @@ -434,17 +445,25 @@ public class CertificateToolTests extends ESTestCase { } private void assertSubjAltNames(GeneralNames subjAltNames, CertificateInformation certInfo) throws Exception { - assertEquals(certInfo.ipAddresses.size() + certInfo.dnsNames.size(), subjAltNames.getNames().length); + final int expectedCount = certInfo.ipAddresses.size() + certInfo.dnsNames.size() + certInfo.commonNames.size(); + assertEquals(expectedCount, subjAltNames.getNames().length); Collections.sort(certInfo.dnsNames); Collections.sort(certInfo.ipAddresses); for (GeneralName generalName : subjAltNames.getNames()) { if (generalName.getTagNo() == GeneralName.dNSName) { - String dns = ((ASN1String)generalName.getName()).getString(); + String dns = ((ASN1String) generalName.getName()).getString(); assertTrue(certInfo.dnsNames.stream().anyMatch(dns::equals)); } else if (generalName.getTagNo() == GeneralName.iPAddress) { byte[] ipBytes = DEROctetString.getInstance(generalName.getName()).getOctets(); String ip = NetworkAddress.format(InetAddress.getByAddress(ipBytes)); assertTrue(certInfo.ipAddresses.stream().anyMatch(ip::equals)); + } else if (generalName.getTagNo() == GeneralName.otherName) { + ASN1Sequence seq = ASN1Sequence.getInstance(generalName.getName()); + assertThat(seq.size(), equalTo(2)); + assertThat(seq.getObjectAt(0), instanceOf(ASN1ObjectIdentifier.class)); + assertThat(seq.getObjectAt(0).toString(), equalTo(CertUtils.CN_OID)); + assertThat(seq.getObjectAt(1), instanceOf(ASN1String.class)); + assertThat(seq.getObjectAt(1).toString(), Matchers.isIn(certInfo.commonNames)); } else { fail("unknown general name with tag " + generalName.getTagNo()); } @@ -478,6 +497,8 @@ public class CertificateToolTests extends ESTestCase { " - name: \"node2\"", " filename: \"node2\"", " ip: \"::1\"", + " cn:", + " - \"node2.elasticsearch\"", " - name: \"node3\"", " filename: \"node3\"", " - name: \"CN=different value\"", diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ssl/RestrictedTrustManagerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ssl/RestrictedTrustManagerTests.java new file mode 100644 index 00000000000..04e1129d154 --- /dev/null +++ 
b/plugin/src/test/java/org/elasticsearch/xpack/ssl/RestrictedTrustManagerTests.java @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ssl; + +import javax.net.ssl.X509ExtendedTrustManager; +import javax.security.auth.x500.X500Principal; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.security.KeyPair; +import java.security.cert.Certificate; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.asn1.x509.GeneralNames; +import org.bouncycastle.operator.OperatorException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Description; +import org.hamcrest.TypeSafeMatcher; +import org.junit.Assert; +import org.junit.Before; + +import static org.elasticsearch.xpack.ssl.CertUtils.generateSignedCertificate; + +public class RestrictedTrustManagerTests extends ESTestCase { + + /** + * Use a small keysize for performance, since the keys are only used in this test, but a large enough keysize + * to get past the SSL algorithm checker + */ + private static final int KEYSIZE = 1024; + + private X509ExtendedTrustManager baseTrustManager; + private Map certificates; + private int numberOfClusters; + private int numberOfNodes; + + @Before + public void generateCertificates() throws GeneralSecurityException, IOException, OperatorException { + KeyPair caPair = CertUtils.generateKeyPair(KEYSIZE); + X500Principal ca = new X500Principal("cn=CertAuth"); + X509Certificate caCert = CertUtils.generateCACertificate(ca, caPair, 30); + baseTrustManager = CertUtils.trustManager(new Certificate[] { caCert }); + + certificates = new HashMap<>(); + numberOfClusters = scaledRandomIntBetween(2, 8); + numberOfNodes = scaledRandomIntBetween(2, 8); + for (int cluster = 1; cluster <= numberOfClusters; cluster++) { + for (int node = 1; node <= numberOfNodes; node++) { + KeyPair nodePair = CertUtils.generateKeyPair(KEYSIZE); + final String cn = "n" + node + ".c" + cluster; + final X500Principal principal = new X500Principal("cn=" + cn); + final String san = "node" + node + ".cluster" + cluster + ".elasticsearch"; + final GeneralNames altNames = new GeneralNames(CertUtils.createCommonName(san)); + final X509Certificate signed = generateSignedCertificate(principal, altNames, nodePair, caCert, caPair.getPrivate(), 30); + final X509Certificate self = generateSignedCertificate(principal, altNames, nodePair, null, null, 30); + certificates.put(cn + "/ca", new X509Certificate[] { signed }); + certificates.put(cn + "/self", new X509Certificate[] { self }); + } + } + } + + public void testTrustsExplicitCertificateName() throws Exception { + final int trustedCluster = randomIntBetween(1, numberOfClusters); + final List trustedNames = new ArrayList<>(numberOfNodes); + for (int node = 1; node <= numberOfNodes; node++) { + trustedNames.add("node" + node + ".cluster" + trustedCluster + ".elasticsearch"); + } + final 
CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions(trustedNames); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + assertSingleClusterIsTrusted(trustedCluster, trustManager, trustedNames); + } + + public void testTrustsWildcardCertificateName() throws Exception { + final int trustedCluster = randomIntBetween(1, numberOfClusters); + final List trustedNames = Collections.singletonList("*.cluster" + trustedCluster + ".elasticsearch"); + final CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions(trustedNames); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + assertSingleClusterIsTrusted(trustedCluster, trustManager, trustedNames); + } + + public void testTrustWithRegexCertificateName() throws Exception { + final int trustedNode = randomIntBetween(1, numberOfNodes); + final List trustedNames = Collections.singletonList("/node" + trustedNode + ".cluster[0-9].elasticsearch/"); + final CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions( + trustedNames + ); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + for (int cluster = 1; cluster <= numberOfClusters; cluster++) { + for (int node = 1; node <= numberOfNodes; node++) { + if (node == trustedNode) { + assertTrusted(trustManager, "n" + node + ".c1/ca"); + } else { + assertNotTrusted(trustManager, "n" + node + ".c" + cluster + "/ca", trustedNames); + } + } + } + } + + public void testThatDelegateTrustManagerIsRespected() throws Exception { + final CertificateTrustRestrictions restrictions = new CertificateTrustRestrictions(Collections.singletonList("*.elasticsearch")); + final RestrictedTrustManager trustManager = new RestrictedTrustManager(Settings.EMPTY, baseTrustManager, restrictions); + for (String cert : certificates.keySet()) { + if (cert.endsWith("/ca")) { + assertTrusted(trustManager, cert); + } else { + assertNotValid(trustManager, cert, "PKIX path building failed.*"); + } + } + } + + private void assertSingleClusterIsTrusted(int trustedCluster, RestrictedTrustManager trustManager, List trustedNames) + throws Exception { + for (int cluster = 1; cluster <= numberOfClusters; cluster++) { + for (int node = 1; node <= numberOfNodes; node++) { + final String certAlias = "n" + node + ".c" + cluster + "/ca"; + if (cluster == trustedCluster) { + assertTrusted(trustManager, certAlias); + } else { + assertNotTrusted(trustManager, certAlias, trustedNames); + } + } + } + } + + private void assertTrusted(RestrictedTrustManager trustManager, String certAlias) throws Exception { + final X509Certificate[] chain = Objects.requireNonNull(this.certificates.get(certAlias)); + try { + trustManager.checkClientTrusted(chain, "ignore"); + // pass + } catch (CertificateException e) { + Assert.fail("Certificate " + describe(chain) + " is not trusted - " + e); + } + } + + private void assertNotTrusted(RestrictedTrustManager trustManager, String certAlias, List trustedNames) throws Exception { + final String expectedError = ".* does not match the trusted names \\[.*" + Pattern.quote(trustedNames.get(0)) + ".*"; + assertNotValid(trustManager, certAlias, expectedError); + } + + private void assertNotValid(RestrictedTrustManager trustManager, String certAlias, String expectedError) throws Exception { + final X509Certificate[] chain = 
Objects.requireNonNull(this.certificates.get(certAlias)); + try { + trustManager.checkClientTrusted(chain, "ignore"); + Assert.fail("Certificate " + describe(chain) + " is trusted but shouldn't be"); + } catch (CertificateException e) { + assertThat(e.getMessage(), new TypeSafeMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("matches pattern ").appendText(expectedError); + } + + @Override + protected boolean matchesSafely(String item) { + return item.matches(expectedError); + } + }); + } + } + + private String describe(X509Certificate[] cert) { + return Arrays.stream(cert).map(c -> c.getSubjectDN().getName()).collect(Collectors.joining(", ")); + } + +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java new file mode 100644 index 00000000000..1d247038926 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -0,0 +1,252 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ssl; + +import javax.net.ssl.SSLHandshakeException; +import javax.net.ssl.SSLSocket; +import javax.net.ssl.SSLSocketFactory; +import javax.security.auth.x500.X500Principal; +import java.io.BufferedWriter; +import java.io.IOException; +import java.net.SocketException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyPair; +import java.security.PrivateKey; +import java.security.cert.X509Certificate; +import java.util.Collections; + +import org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.asn1.x509.GeneralNames; +import org.bouncycastle.openssl.jcajce.JcaPEMWriter; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.Transport; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import static org.elasticsearch.xpack.ssl.CertUtils.generateSignedCertificate; +import static org.hamcrest.Matchers.is; + +/** + * Integration tests for SSL trust restrictions + * + * @see RestrictedTrustManager + */ +@ESIntegTestCase.ClusterScope(numDataNodes = 1, numClientNodes = 0, supportsDedicatedMasters = false) +@TestLogging("org.elasticsearch.xpack.ssl.RestrictedTrustManager:DEBUG") +public class SSLTrustRestrictionsTests extends SecurityIntegTestCase { + + /** + * Use a small keysize for performance, since the keys are only used in this test, but a large enough keysize + * to get past the SSL algorithm checker + */ + private static final int KEYSIZE = 1024; + + private static final int RESOURCE_RELOAD_MILLIS = 3; + private static final int WAIT_RELOAD_MILLIS = 25; + + private static Path configPath; + private static Settings nodeSSL; + + private static CertificateInfo ca; + private static CertificateInfo trustedCert; + private static CertificateInfo untrustedCert; + private static Path restrictionsPath; + + @Override + protected int maxNumberOfNodes() { + // We are 
trying to test the SSL configuration for which clients/nodes may join a cluster + // We prefer the cluster to only have 1 node, so that the SSL checking doesn't happen until the test methods run + // (That's not _quite_ true, because the base setup code checks the cluster using transport client, but it's the best we can do) + return 1; + } + + @BeforeClass + public static void setupCertificates() throws Exception { + configPath = createTempDir(); + + final KeyPair caPair = CertUtils.generateKeyPair(KEYSIZE); + final X509Certificate caCert = CertUtils.generateCACertificate(new X500Principal("cn=CertAuth"), caPair, 30); + ca = writeCertificates("ca", caPair.getPrivate(), caCert); + + trustedCert = generateCertificate("trusted", "node.trusted"); + untrustedCert = generateCertificate("untrusted", "someone.else"); + + nodeSSL = Settings.builder() + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.transport.ssl.verification_mode", "certificate") + .putArray("xpack.ssl.certificate_authorities", ca.getCertPath().toString()) + .put("xpack.ssl.key", trustedCert.getKeyPath()) + .put("xpack.ssl.certificate", trustedCert.getCertPath()) + .build(); + } + + @AfterClass + public static void cleanup() { + configPath = null; + nodeSSL = null; + ca = null; + trustedCert = null; + untrustedCert = null; + } + + @Override + public Settings nodeSettings(int nodeOrdinal) { + + Settings parentSettings = super.nodeSettings(nodeOrdinal); + Settings.Builder builder = Settings.builder() + .put(parentSettings.filter((s) -> s.startsWith("xpack.ssl.") == false)) + .put(nodeSSL); + + restrictionsPath = configPath.resolve("trust_restrictions.yml"); + writeRestrictions("*.trusted"); + builder.put("xpack.ssl.trust_restrictions.path", restrictionsPath); + builder.put("resource.reload.interval.high", RESOURCE_RELOAD_MILLIS + "ms"); + + return builder.build(); + } + + private void writeRestrictions(String trustedPattern) { + try { + Files.write(restrictionsPath, Collections.singleton("trust.subject_name: \"" + trustedPattern + "\"")); + } catch (IOException e) { + throw new ElasticsearchException("failed to write restrictions", e); + } + } + + @Override + protected Settings transportClientSettings() { + Settings parentSettings = super.transportClientSettings(); + Settings.Builder builder = Settings.builder() + .put(parentSettings.filter((s) -> s.startsWith("xpack.ssl.") == false)) + .put(nodeSSL); + return builder.build(); + } + + @Override + protected boolean useGeneratedSSLConfig() { + return false; + } + + public void testCertificateWithTrustedNameIsAccepted() throws Exception { + writeRestrictions("*.trusted"); + try { + tryConnect(trustedCert); + } catch (SSLHandshakeException | SocketException ex) { + fail("handshake should have been successful, but failed with " + ex); + } + } + + public void testCertificateWithUntrustedNameFails() throws Exception { + writeRestrictions("*.trusted"); + try { + tryConnect(untrustedCert); + fail("handshake should have failed, but was successful"); + } catch (SSLHandshakeException | SocketException ex) { + // expected + } + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/2007") + public void testRestrictionsAreReloaded() throws Exception { + writeRestrictions("*"); + try { + tryConnect(untrustedCert); + } catch (SSLHandshakeException | SocketException ex) { + fail("handshake should have been successful, but failed with " + ex); + } + writeRestrictions("*.trusted"); + Thread.sleep(WAIT_RELOAD_MILLIS); + try { + 
tryConnect(untrustedCert); + fail("handshake should have failed, but was successful"); + } catch (SSLHandshakeException | SocketException ex) { + // expected + } + } + + private void tryConnect(CertificateInfo certificate) throws Exception { + Settings settings = Settings.builder() + .put("path.home", createTempDir()) + .put("xpack.ssl.key", certificate.getKeyPath()) + .put("xpack.ssl.certificate", certificate.getCertPath()) + .putArray("xpack.ssl.certificate_authorities", ca.getCertPath().toString()) + .put("xpack.ssl.verification_mode", "certificate") + .build(); + + String node = randomFrom(internalCluster().getNodeNames()); + SSLService sslService = new SSLService(settings, new Environment(settings)); + SSLSocketFactory sslSocketFactory = sslService.sslSocketFactory(settings); + TransportAddress address = internalCluster().getInstance(Transport.class, node).boundAddress().publishAddress(); + try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) { + assertThat(socket.isConnected(), is(true)); + // The test simply relies on this (synchronously) connecting (or not), so we don't need a handshake handler + socket.startHandshake(); + } + } + + + private static CertificateInfo generateCertificate(String name, String san) throws Exception { + final KeyPair keyPair = CertUtils.generateKeyPair(KEYSIZE); + final X500Principal principal = new X500Principal("cn=" + name); + final GeneralNames altNames = new GeneralNames(CertUtils.createCommonName(san)); + final X509Certificate cert = generateSignedCertificate(principal, altNames, keyPair, ca.getCertificate(), ca.getKey(), 30); + return writeCertificates(name, keyPair.getPrivate(), cert); + } + + private static CertificateInfo writeCertificates(String name, PrivateKey key, X509Certificate cert) throws IOException { + final Path keyPath = writePem(key, name + ".key"); + final Path certPath = writePem(cert, name + ".crt"); + return new CertificateInfo(key, keyPath, cert, certPath); + } + + private static Path writePem(Object obj, String filename) throws IOException { + Path path = configPath.resolve(filename); + Files.deleteIfExists(path); + try (BufferedWriter out = Files.newBufferedWriter(path); + JcaPEMWriter pemWriter = new JcaPEMWriter(out)) { + pemWriter.writeObject(obj); + } + return path; + } + + private static class CertificateInfo { + private final PrivateKey key; + private final Path keyPath; + private final X509Certificate certificate; + private final Path certPath; + + private CertificateInfo(PrivateKey key, Path keyPath, X509Certificate certificate, Path certPath) { + this.key = key; + this.keyPath = keyPath; + this.certificate = certificate; + this.certPath = certPath; + } + + private PrivateKey getKey() { + return key; + } + + private Path getKeyPath() { + return keyPath; + } + + private X509Certificate getCertificate() { + return certificate; + } + + private Path getCertPath() { + return certPath; + } + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java index 65bd68e7a42..e6477cc9ce1 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestTestCase.java @@ -38,9 +38,7 @@ import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordTok public abstract class XPackRestTestCase extends ESClientYamlSuiteTestCase { private static final String 
BASIC_AUTH_VALUE = - basicAuthHeaderValue("elastic", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); - - private final SetOnce oneAllowed401 = new SetOnce<>(); + basicAuthHeaderValue("x_pack_rest_user", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING); public XPackRestTestCase(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); @@ -58,26 +56,6 @@ public abstract class XPackRestTestCase extends ESClientYamlSuiteTestCase { .build(); } - - @Before - public void setPasswords() throws IOException { - BasicHeader authHeader = new BasicHeader("Authorization", - basicAuthHeaderValue("elastic", new SecureString("".toCharArray()))); - String elasticUserPayload = "{\"password\" : \"" + SecuritySettingsSource.TEST_PASSWORD + "\"}"; - try { - client().performRequest("put", "_xpack/security/user/elastic/_password", Collections.emptyMap(), - new StringEntity(elasticUserPayload, ContentType.APPLICATION_JSON), authHeader); - } catch (ResponseException e) { - // The password might have already been set by the build.gradle file. So we ignore unsuccessful attempts - // due to failed authentication - if (e.getResponse().getStatusLine().getStatusCode() != 401) { - throw e; - } else { - oneAllowed401.set(e.getResponse().getStatusLine().getStatusCode()); - } - } - } - /** * Waits for the Machine Learning templates to be created by {@link MachineLearningTemplateRegistry}. */ diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java index 2492ff36273..48b0e8a2738 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java @@ -13,60 +13,30 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.Collections; import static org.hamcrest.core.IsEqual.equalTo; public class IndexUpgradeCheckTests extends ESTestCase { - public void testKibanaUpgradeCheck() throws Exception { - IndexUpgradeCheck check = Upgrade.getKibanaUpgradeCheckFactory(Settings.EMPTY).v2().apply(null, null); - assertThat(check.getName(), equalTo("kibana")); - IndexMetaData goodKibanaIndex = newTestIndexMeta(".kibana", Settings.EMPTY); - assertThat(check.actionRequired(goodKibanaIndex, Collections.emptyMap()), - equalTo(UpgradeActionRequired.UPGRADE)); - - IndexMetaData renamedKibanaIndex = newTestIndexMeta(".kibana2", Settings.EMPTY); - assertThat(check.actionRequired(renamedKibanaIndex, Collections.emptyMap()), - equalTo(UpgradeActionRequired.NOT_APPLICABLE)); - - assertThat(check.actionRequired(renamedKibanaIndex, Collections.singletonMap("kibana_indices", ".kibana*") - ), equalTo(UpgradeActionRequired.UPGRADE)); - - assertThat(check.actionRequired(renamedKibanaIndex, Collections.singletonMap("kibana_indices", ".kibana1,.kibana2") - ), equalTo(UpgradeActionRequired.UPGRADE)); - - IndexMetaData watcherIndex = newTestIndexMeta(".watches", Settings.EMPTY); - assertThat(check.actionRequired(watcherIndex, Collections.singletonMap("kibana_indices", ".kibana*")), - equalTo(UpgradeActionRequired.NOT_APPLICABLE)); - - IndexMetaData securityIndex = newTestIndexMeta(".security", Settings.EMPTY); - assertThat(check.actionRequired(securityIndex, Collections.singletonMap("kibana_indices", ".kibana*")), - equalTo(UpgradeActionRequired.NOT_APPLICABLE)); - } - - public void testWatcherIndexUpgradeCheck() throws 
Exception{ - IndexUpgradeCheck check = Upgrade.getWatcherUpgradeCheckFactory(Settings.EMPTY).v2().apply(null, null); + public void testWatcherIndexUpgradeCheck() throws Exception { + IndexUpgradeCheck check = Upgrade.getWatcherUpgradeCheckFactory(Settings.EMPTY).apply(null, null); assertThat(check.getName(), equalTo("watcher")); IndexMetaData goodKibanaIndex = newTestIndexMeta(".kibana", Settings.EMPTY); - assertThat(check.actionRequired(goodKibanaIndex, Collections.emptyMap()), - equalTo(UpgradeActionRequired.NOT_APPLICABLE)); + assertThat(check.actionRequired(goodKibanaIndex), equalTo(UpgradeActionRequired.NOT_APPLICABLE)); IndexMetaData watcherIndex = newTestIndexMeta(".watches", Settings.EMPTY); - assertThat(check.actionRequired(watcherIndex, Collections.singletonMap("kibana_indices", ".kibana*")), - equalTo(UpgradeActionRequired.UPGRADE)); + assertThat(check.actionRequired(watcherIndex), equalTo(UpgradeActionRequired.UPGRADE)); - IndexMetaData watcherIndexWithAlias = newTestIndexMeta("my_watches", ".watches", Settings.EMPTY, "watch"); - assertThat(check.actionRequired(watcherIndexWithAlias, Collections.emptyMap()), - equalTo(UpgradeActionRequired.UPGRADE)); + IndexMetaData watcherIndexWithAlias = newTestIndexMeta("my_watches", ".watches", Settings.EMPTY); + assertThat(check.actionRequired(watcherIndexWithAlias), equalTo(UpgradeActionRequired.UPGRADE)); - IndexMetaData watcherIndexWithAliasUpgraded = newTestIndexMeta("my_watches", ".watches", Settings.EMPTY, "doc"); - assertThat(check.actionRequired(watcherIndexWithAliasUpgraded, Collections.emptyMap()), - equalTo(UpgradeActionRequired.UP_TO_DATE)); + IndexMetaData watcherIndexWithAliasUpgraded = newTestIndexMeta("my_watches", ".watches", + Settings.builder().put(IndexMetaData.INDEX_FORMAT_SETTING.getKey(), "6").build()); + assertThat(check.actionRequired(watcherIndexWithAliasUpgraded), equalTo(UpgradeActionRequired.UP_TO_DATE)); } - public static IndexMetaData newTestIndexMeta(String name, String alias, Settings indexSettings, String type) throws IOException { + public static IndexMetaData newTestIndexMeta(String name, String alias, Settings indexSettings) throws IOException { Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) @@ -80,15 +50,11 @@ public class IndexUpgradeCheckTests extends ESTestCase { // Create alias builder.putAlias(AliasMetaData.newAliasMetaDataBuilder(alias).build()); } - if (type != null) { - // Create fake type - builder.putMapping(type, "{}"); - } return builder.build(); } public static IndexMetaData newTestIndexMeta(String name, Settings indexSettings) throws IOException { - return newTestIndexMeta(name, null, indexSettings, "foo"); + return newTestIndexMeta(name, null, indexSettings); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index d5e7be3baa3..e91d961c4e4 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -11,7 +11,9 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; import
org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction; @@ -22,7 +24,7 @@ import java.util.Collections; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.empty; import static org.hamcrest.core.IsEqual.equalTo; @@ -34,15 +36,11 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { } public void testIndexUpgradeInfo() { + // Testing only negative case here, the positive test is done in bwcTests assertAcked(client().admin().indices().prepareCreate("test").get()); - assertAcked(client().admin().indices().prepareCreate("kibana_test").get()); - ensureYellow("test", "kibana_test"); - Response response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test", "kibana_test") - .setExtraParams(Collections.singletonMap("kibana_indices", "kibana_test")).get(); - logger.info("Got response [{}]", Strings.toString(response)); - assertThat(response.getActions().size(), equalTo(1)); - assertThat(response.getActions().get("kibana_test"), equalTo(UpgradeActionRequired.UPGRADE)); - assertThat(Strings.toString(response), containsString("kibana_test")); + ensureYellow("test"); + Response response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); + assertThat(response.getActions().entrySet(), empty()); } public void testIndexUpgradeInfoLicense() throws Exception { @@ -57,8 +55,9 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { assertThat(response.getActions().entrySet(), empty()); } - public void testUpgradeInternalIndex() throws Exception { - String testIndex = ".kibana"; + public void testUpToDateIndexUpgrade() throws Exception { + // Testing only negative case here, the positive test is done in bwcTests + String testIndex = "test"; String testType = "doc"; assertAcked(client().admin().indices().prepareCreate(testIndex).get()); indexRandom(true, @@ -67,22 +66,25 @@ public class IndexUpgradeIT extends IndexUpgradeIntegTestCase { ); ensureYellow(testIndex); - BulkByScrollResponse response = client().prepareExecute(IndexUpgradeAction.INSTANCE).setIndex(testIndex).get(); - assertThat(response.getCreated(), equalTo(2L)); + IllegalStateException ex = expectThrows(IllegalStateException.class, + () -> client().prepareExecute(IndexUpgradeAction.INSTANCE).setIndex(testIndex).get()); + assertThat(ex.getMessage(), equalTo("Index [" + testIndex + "] cannot be upgraded")); SearchResponse searchResponse = client().prepareSearch(testIndex).get(); assertEquals(2L, searchResponse.getHits().getTotalHits()); } - public void testInternalUpgradePrePostChecks() { + public void testInternalUpgradePrePostChecks() throws Exception { + String testIndex = "internal_index"; + String testType = "test"; Long val = randomLong(); AtomicBoolean preUpgradeIsCalled = new AtomicBoolean(); AtomicBoolean postUpgradeIsCalled = new AtomicBoolean(); IndexUpgradeCheck check = new IndexUpgradeCheck( "test", Settings.EMPTY, - (indexMetaData, stringStringMap) -> { - if (indexMetaData.getIndex().getName().equals("internal_index")) { + indexMetaData -> { + if 
(indexMetaData.getIndex().getName().equals(testIndex)) { return UpgradeActionRequired.UPGRADE; } else { return UpgradeActionRequired.NOT_APPLICABLE; @@ -101,16 +103,33 @@ listener.onResponse(TransportResponse.Empty.INSTANCE); }); - assertAcked(client().admin().indices().prepareCreate("internal_index").get()); + assertAcked(client().admin().indices().prepareCreate(testIndex).get()); + indexRandom(true, + client().prepareIndex(testIndex, testType, "1").setSource("{\"foo\":\"bar\"}", XContentType.JSON), + client().prepareIndex(testIndex, testType, "2").setSource("{\"foo\":\"baz\"}", XContentType.JSON) + ); + ensureYellow(testIndex); IndexUpgradeService service = new IndexUpgradeService(Settings.EMPTY, Collections.singletonList(check)); PlainActionFuture<BulkByScrollResponse> future = PlainActionFuture.newFuture(); - service.upgrade("internal_index", Collections.emptyMap(), clusterService().state(), future); - future.actionGet(); + service.upgrade(new TaskId("abc", 123), testIndex, clusterService().state(), future); + BulkByScrollResponse response = future.actionGet(); + assertThat(response.getCreated(), equalTo(2L)); + + SearchResponse searchResponse = client().prepareSearch(testIndex).get(); + assertEquals(2L, searchResponse.getHits().getTotalHits()); assertTrue(preUpgradeIsCalled.get()); assertTrue(postUpgradeIsCalled.get()); } + public void testIndexUpgradeInfoOnEmptyCluster() { + // On empty cluster asking for all indices shouldn't fail since no indices means nothing needs to be upgraded + Response response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("_all").get(); + assertThat(response.getActions().entrySet(), empty()); + + // but calling on a particular index should fail + assertThrows(client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test"), IndexNotFoundException.class); + } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java index a24882743c6..247eb4e71ac 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java @@ -16,9 +16,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; -import java.util.Collections; import java.util.Map; -import java.util.function.BiFunction; +import java.util.function.Function; import static org.elasticsearch.xpack.upgrade.IndexUpgradeCheckTests.newTestIndexMeta; import static org.hamcrest.core.IsEqual.equalTo; @@ -26,7 +25,7 @@ public class IndexUpgradeServiceTests extends ESTestCase { private IndexUpgradeCheck upgradeBarCheck = new IndexUpgradeCheck("upgrade_bar", Settings.EMPTY, - (BiFunction<IndexMetaData, Map<String, String>, UpgradeActionRequired>) (indexMetaData, stringStringMap) -> { + (Function<IndexMetaData, UpgradeActionRequired>) indexMetaData -> { if ("bar".equals(indexMetaData.getSettings().get("test.setting"))) { return UpgradeActionRequired.UPGRADE; } else { @@ -35,7 +34,7 @@ }, null, null, null, null); private IndexUpgradeCheck reindexFooCheck = new IndexUpgradeCheck("reindex_foo", Settings.EMPTY, - (BiFunction<IndexMetaData, Map<String, String>, UpgradeActionRequired>) (indexMetaData, stringStringMap) -> { + (Function<IndexMetaData, UpgradeActionRequired>) indexMetaData -> { if ("foo".equals(indexMetaData.getSettings().get("test.setting"))) { return
UpgradeActionRequired.REINDEX; } else { @@ -44,10 +43,10 @@ }, null, null, null, null); private IndexUpgradeCheck everythingIsFineCheck = new IndexUpgradeCheck("everything_is_fine", Settings.EMPTY, - (indexMetaData, stringStringMap) -> UpgradeActionRequired.UP_TO_DATE, null, null, null, null); + indexMetaData -> UpgradeActionRequired.UP_TO_DATE, null, null, null, null); private IndexUpgradeCheck unreachableCheck = new IndexUpgradeCheck("unreachable", Settings.EMPTY, - (BiFunction<IndexMetaData, Map<String, String>, UpgradeActionRequired>) (indexMetaData, stringStringMap) -> { + (Function<IndexMetaData, UpgradeActionRequired>) indexMetaData -> { fail("Unreachable check is called"); return null; }, null, null, null, null); @@ -77,13 +76,13 @@ ClusterState clusterState = mockClusterState(fooIndex, barIndex, bazIndex); Map<String, UpgradeActionRequired> result = service.upgradeInfo(new String[]{"bar", "foo", "baz"}, - IndicesOptions.lenientExpandOpen(), Collections.emptyMap(), clusterState); + IndicesOptions.lenientExpandOpen(), clusterState); assertThat(result.size(), equalTo(2)); assertThat(result.get("bar"), equalTo(UpgradeActionRequired.UPGRADE)); assertThat(result.get("foo"), equalTo(UpgradeActionRequired.REINDEX)); - result = service.upgradeInfo(new String[]{"b*"}, IndicesOptions.lenientExpandOpen(), Collections.emptyMap(), clusterState); + result = service.upgradeInfo(new String[]{"b*"}, IndicesOptions.lenientExpandOpen(), clusterState); assertThat(result.size(), equalTo(1)); assertThat(result.get("bar"), equalTo(UpgradeActionRequired.UPGRADE)); @@ -103,7 +102,7 @@ ClusterState clusterState = mockClusterState(fooIndex, barIndex, bazIndex); Map<String, UpgradeActionRequired> result = service.upgradeInfo(new String[]{"bar", "foo", "baz"}, - IndicesOptions.lenientExpandOpen(), Collections.emptyMap(), clusterState); + IndicesOptions.lenientExpandOpen(), clusterState); assertThat(result.size(), equalTo(2)); assertThat(result.get("bar"), equalTo(UpgradeActionRequired.UPGRADE)); @@ -124,7 +123,7 @@ ClusterState clusterState = mockClusterState(fooIndex, barIndex, bazIndex); Map<String, UpgradeActionRequired> result = service.upgradeInfo(new String[]{"bar", "foo", "baz"}, - IndicesOptions.lenientExpandOpen(), Collections.emptyMap(), clusterState); + IndicesOptions.lenientExpandOpen(), clusterState); assertThat(result.size(), equalTo(0)); // everything as the first checker should indicate that everything is fine } @@ -142,7 +141,7 @@ ClusterState clusterState = mockClusterState(goodIndex, badIndex); Map<String, UpgradeActionRequired> result = service.upgradeInfo(new String[]{"good", "bad"}, - IndicesOptions.lenientExpandOpen(), Collections.emptyMap(), clusterState); + IndicesOptions.lenientExpandOpen(), clusterState); assertThat(result.size(), equalTo(1)); assertThat(result.get("bad"), equalTo(UpgradeActionRequired.REINDEX)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java new file mode 100644 index 00000000000..03b12a0e2b7 --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.upgrade; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.index.reindex.ReindexAction; +import org.elasticsearch.index.reindex.ReindexPlugin; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction; +import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +@ESIntegTestCase.ClusterScope(scope = TEST, supportsDedicatedMasters = false, numClientNodes = 0, maxNumDataNodes = 1) +public class IndexUpgradeTasksIT extends ESIntegTestCase { + + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(MockUpgradePlugin.class, ReindexPlugin.class); + } + + public static class MockUpgradePlugin extends Plugin implements ScriptPlugin, ActionPlugin { + + public static final String NAME = MockScriptEngine.NAME; + + private Settings settings; + private Upgrade upgrade; + + private CountDownLatch upgradeLatch = new CountDownLatch(1); + private CountDownLatch upgradeCalledLatch = new CountDownLatch(1); + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new MockScriptEngine(pluginScriptLang(), pluginScripts()); + } + + public String pluginScriptLang() { + return NAME; + } + + public MockUpgradePlugin(Settings settings) { + this.settings = settings; + this.upgrade = new Upgrade(settings); + 
Loggers.getLogger(IndexUpgradeTasksIT.class).info("MockUpgradePlugin is created"); + } + + + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new HashMap<>(); + scripts.put("block", map -> { + upgradeCalledLatch.countDown(); + try { + assertThat(upgradeLatch.await(10, TimeUnit.SECONDS), equalTo(true)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + return null; + }); + return scripts; + } + + @Override + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry) { + return Collections.singletonList(new IndexUpgradeService(settings, Collections.singletonList( + new IndexUpgradeCheck("test", settings, + new Function() { + @Override + public UpgradeActionRequired apply(IndexMetaData indexMetaData) { + if ("test".equals(indexMetaData.getIndex().getName())) { + if (Upgrade.checkInternalIndexFormat(indexMetaData)) { + return UpgradeActionRequired.UP_TO_DATE; + } else { + return UpgradeActionRequired.UPGRADE; + } + } else { + return UpgradeActionRequired.NOT_APPLICABLE; + } + } + }, + client, clusterService, Strings.EMPTY_ARRAY, + new Script(ScriptType.INLINE, NAME, "block", Collections.emptyMap())) + ))); + } + + @Override + public List> getActions() { + return upgrade.getActions(); + } + + @Override + public Collection getRestHeaders() { + return upgrade.getRestHeaders(); + } + } + + @Override + protected Collection> transportClientPlugins() { + return nodePlugins(); + } + + public void testParentTasksDuringUpgrade() throws Exception { + logger.info("before getInstance"); + PluginsService pluginsService = internalCluster().getDataNodeInstance(PluginsService.class); + MockUpgradePlugin mockUpgradePlugin = pluginsService.filterPlugins(MockUpgradePlugin.class).get(0); + assertThat(mockUpgradePlugin, notNullValue()); + logger.info("after getInstance"); + + assertAcked(client().admin().indices().prepareCreate("test").get()); + client().prepareIndex("test", "doc", "1").setSource("{\"foo\": \"bar\"}", XContentType.JSON) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + + ensureYellow("test"); + + + IndexUpgradeInfoAction.Response infoResponse = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); + assertThat(infoResponse.getActions().keySet(), contains("test")); + assertThat(infoResponse.getActions().get("test"), equalTo(UpgradeActionRequired.UPGRADE)); + + + ActionFuture upgradeResponse = + client().prepareExecute(IndexUpgradeAction.INSTANCE).setIndex("test").execute(); + + + assertThat(mockUpgradePlugin.upgradeCalledLatch.await(10, TimeUnit.SECONDS), equalTo(true)); + ListTasksResponse response = client().admin().cluster().prepareListTasks().get(); + mockUpgradePlugin.upgradeLatch.countDown(); + + // Find the upgrade task group + TaskGroup upgradeGroup = null; + for (TaskGroup group : response.getTaskGroups()) { + if (IndexUpgradeAction.NAME.equals(group.getTaskInfo().getAction())) { + assertThat(upgradeGroup, nullValue()); + upgradeGroup = group; + } + } + assertThat(upgradeGroup, notNullValue()); + assertThat(upgradeGroup.getTaskInfo().isCancellable(), equalTo(true)); // The task should be cancellable + assertThat(upgradeGroup.getChildTasks(), hasSize(1)); // The reindex task should be a child + assertThat(upgradeGroup.getChildTasks().get(0).getTaskInfo().getAction(), equalTo(ReindexAction.NAME)); + + 
assertThat(upgradeResponse.get().getCreated(), equalTo(1L)); + } +} diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java index 6276deff09b..3b9c10b36bf 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.xpack.XPackPlugin; @@ -76,7 +77,7 @@ public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { createTestIndex("test"); InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); - reindexer.upgrade("test", clusterState(), future); + reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); BulkByScrollResponse response = future.actionGet(); assertThat(response.getCreated(), equalTo(2L)); @@ -102,7 +103,7 @@ public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { createTestIndex("test_v123"); InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); - reindexer.upgrade("test", clusterState(), future); + reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, ResourceAlreadyExistsException.class); // Make sure that the index is not marked as read-only @@ -115,7 +116,7 @@ public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { client().admin().indices().prepareAliases().addAlias("test-foo", "test_v123").get(); InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); - reindexer.upgrade("test", clusterState(), future); + reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, InvalidIndexNameException.class); // Make sure that the index is not marked as read-only @@ -129,7 +130,7 @@ public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(settings).get()); InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); - reindexer.upgrade("test", clusterState(), future); + reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, IllegalStateException.class); // Make sure that the index is still marked as read-only @@ -148,7 +149,7 @@ public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { client().prepareIndex("test", "doc").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); InternalIndexReindexer reindexer = createIndexReindexer(123, script("fail"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); - reindexer.upgrade("test", clusterState(), future); + reindexer.upgrade(new TaskId("abc", 123), "test", clusterState(), future); assertThrows(future, RuntimeException.class); 
// Make sure that the index is not marked as read-only @@ -160,7 +161,7 @@ public class InternalIndexReindexerIT extends IndexUpgradeIntegTestCase { InternalIndexReindexer reindexer = createIndexReindexer(123, script("add_bar"), Strings.EMPTY_ARRAY); PlainActionFuture future = PlainActionFuture.newFuture(); - reindexer.upgrade("test", withRandomOldNode(), future); + reindexer.upgrade(new TaskId("abc", 123), "test", withRandomOldNode(), future); assertThrows(future, IllegalStateException.class); // Make sure that the index is not marked as read-only diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeActionRequestTests.java index 92160ac42f0..fccb8143f1e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeActionRequestTests.java @@ -8,16 +8,10 @@ package org.elasticsearch.xpack.upgrade.actions; import org.elasticsearch.test.AbstractStreamableTestCase; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeAction.Request; -import java.util.Collections; - public class IndexUpgradeActionRequestTests extends AbstractStreamableTestCase { @Override protected Request createTestInstance() { - Request request = new Request(randomAlphaOfLength(10)); - if (randomBoolean()) { - request.extraParams(Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(20))); - } - return request; + return new Request(randomAlphaOfLength(10)); } @Override diff --git a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java index a02595ea67f..005abbe5130 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java @@ -9,8 +9,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.test.AbstractStreamableTestCase; import org.elasticsearch.xpack.upgrade.actions.IndexUpgradeInfoAction.Request; -import java.util.Collections; - public class IndexUpgradeInfoActionRequestTests extends AbstractStreamableTestCase { @Override protected Request createTestInstance() { @@ -20,9 +18,6 @@ public class IndexUpgradeInfoActionRequestTests extends AbstractStreamableTestCa indices[i] = randomAlphaOfLength(10); } Request request = new Request(indices); - if (randomBoolean()) { - request.extraParams(Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(20))); - } if (randomBoolean()) { request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java index 1e7be6c1929..5e7b1b0f2a2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherIndexingListenerTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import 
org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.support.clock.ClockMock; import org.elasticsearch.xpack.watcher.WatcherIndexingListener.Configuration; import org.elasticsearch.xpack.watcher.WatcherIndexingListener.ShardAllocationConfiguration; @@ -373,7 +374,7 @@ public class WatcherIndexingListenerTests extends ESTestCase { Index index = new Index(Watch.INDEX, "foo"); ShardId shardId = new ShardId(index, 0); ShardRoutingState randomState = randomFrom(STARTED, RELOCATING); - ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, "current", true, + ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, "current", randomState == RELOCATING ? "other" : null, true, randomState); IndexRoutingTable indexRoutingTable = IndexRoutingTable.builder(index) .addShard(shardRouting).build(); @@ -651,6 +652,20 @@ public class WatcherIndexingListenerTests extends ESTestCase { assertThat(listener.getConfiguration(), is(INACTIVE)); } + public void testThatIndexingListenerIsInactiveWhenWatchExecutionIsNotDistributed() throws Exception { + listener.setConfiguration(INACTIVE); + Version oldVersion = VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_0_0_alpha2); + DiscoveryNode node = new DiscoveryNode("node_1", ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), + new HashSet<>(asList(DiscoveryNode.Role.values())), oldVersion); + + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")) + .nodes(new DiscoveryNodes.Builder().masterNodeId("node_1").localNodeId("node_1").add(node)) + .build(); + listener.clusterChanged(new ClusterChangedEvent("something", state, state)); + + assertThat(listener.getConfiguration(), is(INACTIVE)); + } + // // helper methods // diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java index 27d19dee42d..a6a7b600a73 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherLifeCycleServiceTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.watcher.execution.TriggeredWatchStore; import org.elasticsearch.xpack.watcher.watch.Watch; @@ -40,6 +41,7 @@ import java.util.concurrent.ExecutorService; import static java.util.Arrays.asList; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.anyString; @@ -53,7 +55,6 @@ import static org.mockito.Mockito.when; public class WatcherLifeCycleServiceTests extends ESTestCase { - private ClusterService clusterService; private WatcherService watcherService; private WatcherLifeCycleService lifeCycleService; @@ -62,7 +63,7 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { ThreadPool threadPool = mock(ThreadPool.class); final ExecutorService executorService = EsExecutors.newDirectExecutorService(); when(threadPool.executor(anyString())).thenReturn(executorService); - 
clusterService = mock(ClusterService.class); + ClusterService clusterService = mock(ClusterService.class); Answer answer = invocationOnMock -> { AckedClusterStateUpdateTask updateTask = (AckedClusterStateUpdateTask) invocationOnMock.getArguments()[1]; updateTask.onAllNodesAcked(null); @@ -116,11 +117,10 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { .routingTable(RoutingTable.builder().add(watchRoutingTable).build()) .build(); - when(clusterService.state()).thenReturn(clusterState); when(watcherService.validate(clusterState)).thenReturn(true); when(watcherService.state()).thenReturn(WatcherState.STOPPED); - lifeCycleService.start(); + lifeCycleService.clusterChanged(new ClusterChangedEvent("foo", clusterState, clusterState)); verify(watcherService, times(1)).start(any(ClusterState.class)); verify(watcherService, never()).stop(anyString()); @@ -159,11 +159,11 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { DiscoveryNodes.Builder nodes = new DiscoveryNodes.Builder().masterNodeId("id1").localNodeId("id1"); ClusterState clusterState = ClusterState.builder(new ClusterName("my-cluster")) .nodes(nodes).build(); - when(clusterService.state()).thenReturn(clusterState); when(watcherService.state()).thenReturn(WatcherState.STOPPED); when(watcherService.validate(clusterState)).thenReturn(false); - lifeCycleService.start(); + lifeCycleService.clusterChanged(new ClusterChangedEvent("foo", clusterState, clusterState)); + verify(watcherService, never()).start(any(ClusterState.class)); verify(watcherService, never()).stop(anyString()); } @@ -172,10 +172,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { DiscoveryNodes.Builder nodes = new DiscoveryNodes.Builder().masterNodeId("id1").localNodeId("id1"); ClusterState clusterState = ClusterState.builder(new ClusterName("my-cluster")) .nodes(nodes).build(); - when(clusterService.state()).thenReturn(clusterState); when(watcherService.state()).thenReturn(WatcherState.STOPPING); - lifeCycleService.start(); + lifeCycleService.clusterChanged(new ClusterChangedEvent("foo", clusterState, clusterState)); verify(watcherService, never()).validate(any(ClusterState.class)); verify(watcherService, never()).start(any(ClusterState.class)); verify(watcherService, never()).stop(anyString()); @@ -195,7 +194,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { ).build(); IndexRoutingTable watchRoutingTable = IndexRoutingTable.builder(watchIndex) - .addShard(TestShardRouting.newShardRouting(shardId, "node_1", true, randomFrom(STARTED, RELOCATING))) + .addShard(randomBoolean() ? + TestShardRouting.newShardRouting(shardId, "node_1", true, STARTED) : + TestShardRouting.newShardRouting(shardId, "node_1", "node_2", true, RELOCATING)) .build(); ClusterState clusterStateWithLocalShards = ClusterState.builder(new ClusterName("my-cluster")) .nodes(nodes) @@ -205,7 +206,9 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { // shard moved over to node 2 IndexRoutingTable watchRoutingTableNode2 = IndexRoutingTable.builder(watchIndex) - .addShard(TestShardRouting.newShardRouting(shardId, "node_2", true, randomFrom(STARTED, RELOCATING))) + .addShard(randomBoolean() ? 
+ TestShardRouting.newShardRouting(shardId, "node_2", true, STARTED) : + TestShardRouting.newShardRouting(shardId, "node_2", "node_1", true, RELOCATING)) .build(); ClusterState clusterStateWithoutLocalShards = ClusterState.builder(new ClusterName("my-cluster")) .nodes(nodes) @@ -238,8 +241,8 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { .build(); IndexRoutingTable previousWatchRoutingTable = IndexRoutingTable.builder(watchIndex) - .addShard(TestShardRouting.newShardRouting(secondShardId, "node_1", true, randomFrom(STARTED, RELOCATING))) - .addShard(TestShardRouting.newShardRouting(shardId, "node_2", true, randomFrom(STARTED, RELOCATING))) + .addShard(TestShardRouting.newShardRouting(secondShardId, "node_1", true, STARTED)) + .addShard(TestShardRouting.newShardRouting(shardId, "node_2", true, STARTED)) .build(); IndexMetaData indexMetaData = IndexMetaData.builder(Watch.INDEX) @@ -256,8 +259,8 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { .build(); IndexRoutingTable currentWatchRoutingTable = IndexRoutingTable.builder(watchIndex) - .addShard(TestShardRouting.newShardRouting(shardId, "node_1", false, randomFrom(STARTED, RELOCATING))) - .addShard(TestShardRouting.newShardRouting(secondShardId, "node_1", true, randomFrom(STARTED, RELOCATING))) + .addShard(TestShardRouting.newShardRouting(shardId, "node_1", false, STARTED)) + .addShard(TestShardRouting.newShardRouting(secondShardId, "node_1", true, STARTED)) .addShard(TestShardRouting.newShardRouting(shardId, "node_2", true, STARTED)) .build(); @@ -403,8 +406,88 @@ public class WatcherLifeCycleServiceTests extends ESTestCase { verify(watcherService, never()).start(any(ClusterState.class)); } + public void testWatcherPausesOnNonMasterWhenOldNodesHoldWatcherIndex() { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + .masterNodeId("node_1").localNodeId("node_2") + .add(newNode("node_1")) + .add(newNode("node_2")) + .add(newNode("oldNode", VersionUtils.randomVersionBetween(random(), Version.V_5_5_0, Version.V_6_0_0_alpha2))) + .build(); + + Index index = new Index(Watch.INDEX, "foo"); + ShardId shardId = new ShardId(index, 0); + IndexRoutingTable routingTable = IndexRoutingTable.builder(index) + .addShard(TestShardRouting.newShardRouting(shardId, "node_2", true, STARTED)) + .addShard(TestShardRouting.newShardRouting(shardId, "oldNode", false, STARTED)).build(); + + Settings.Builder indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.INDEX_FORMAT_SETTING.getKey(), 6); + + IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(Watch.INDEX).settings(indexSettings); + + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")) + .nodes(nodes) + .routingTable(RoutingTable.builder().add(routingTable).build()) + .metaData(MetaData.builder().put(indexMetaDataBuilder)) + .build(); + + WatcherState watcherState = randomFrom(WatcherState.values()); + when(watcherService.state()).thenReturn(watcherState); + + lifeCycleService.clusterChanged(new ClusterChangedEvent("any", state, state)); + if (watcherState == WatcherState.STARTED || watcherState == WatcherState.STARTING) { + verify(watcherService).pauseExecution(any(String.class)); + } + } + + public void testWatcherStartsOnlyOnMasterWhenOldNodesAreInCluster() throws Exception { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + 
.masterNodeId("node_1").localNodeId("node_1") + .add(newNode("node_1")) + .add(newNode("node_2")) + .add(newNode("oldNode", VersionUtils.randomVersionBetween(random(), Version.V_5_5_0, Version.V_6_0_0_alpha2))) + .build(); + + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")).nodes(nodes).build(); + when(watcherService.validate(eq(state))).thenReturn(true); + when(watcherService.state()).thenReturn(WatcherState.STOPPED); + + lifeCycleService.clusterChanged(new ClusterChangedEvent("any", state, state)); + verify(watcherService).start(any(ClusterState.class)); + } + + public void testDistributedWatchExecutionDisabledWith5xNodesInCluster() throws Exception { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + .masterNodeId("node_1").localNodeId("node_1") + .add(newNode("node_1")) + .add(newNode("node_2", VersionUtils.randomVersionBetween(random(), Version.V_5_5_0, Version.V_6_0_0_alpha2))) + .build(); + + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")).nodes(nodes).build(); + + assertThat(WatcherLifeCycleService.isWatchExecutionDistributed(state), is(false)); + } + + public void testDistributedWatchExecutionEnabled() throws Exception { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + .masterNodeId("master_node").localNodeId("master_node") + .add(newNode("master_node", VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_beta1, Version.CURRENT))) + .add(newNode("data_node_6x", VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_beta1, Version.CURRENT))) + .build(); + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")).nodes(nodes).build(); + + assertThat(WatcherLifeCycleService.isWatchExecutionDistributed(state), is(true)); + } + private static DiscoveryNode newNode(String nodeName) { + return newNode(nodeName, Version.CURRENT); + } + + private static DiscoveryNode newNode(String nodeName, Version version) { return new DiscoveryNode(nodeName, ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), - new HashSet<>(asList(DiscoveryNode.Role.values())), Version.CURRENT); + new HashSet<>(asList(DiscoveryNode.Role.values())), version); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java index 904cb51c1dd..6967423f12c 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginDisableTests.java @@ -31,7 +31,7 @@ import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, maxNumDataNodes = 3) +@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, maxNumDataNodes = 3) public class WatcherPluginDisableTests extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index e1a2f4a8225..0b97f11d9f2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -13,7 +13,6 
@@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; @@ -81,11 +80,11 @@ public class CompareConditionSearchTests extends AbstractWatcherIntegrationTestC Clock.systemUTC()); SearchHit hit = new SearchHit(0, "1", new Text("type"), null); hit.score(1f); - hit.shard(new SearchShardTarget("a", new Index("a", "indexUUID"), 0)); + hit.shard(new SearchShardTarget("a", new Index("a", "indexUUID"), 0, null)); InternalSearchResponse internalSearchResponse = new InternalSearchResponse( new SearchHits(new SearchHit[]{hit}, 1L, 1f), null, null, null, false, false, 1); - SearchResponse response = new SearchResponse(internalSearchResponse, "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(internalSearchResponse, "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_watch_name", new Payload.XContent(response)); assertThat(condition.execute(ctx).met(), is(true)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionSearchTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionSearchTests.java index b07bc79b222..d5ded151220 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionSearchTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionSearchTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xpack.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; @@ -104,11 +103,11 @@ public class ScriptConditionSearchTests extends AbstractWatcherIntegrationTestCa mockScript("ctx.payload.hits?.hits[0]?._score == 1.0"), scriptService); SearchHit hit = new SearchHit(0, "1", new Text("type"), null); hit.score(1f); - hit.shard(new SearchShardTarget("a", new Index("a", "testUUID"), 0)); + hit.shard(new SearchShardTarget("a", new Index("a", "testUUID"), 0, null)); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(new SearchHits( new SearchHit[]{hit}, 1L, 1f), null, null, null, false, false, 1); - SearchResponse response = new SearchResponse(internalSearchResponse, "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(internalSearchResponse, "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_watch_name", new Payload.XContent(response)); assertThat(condition.execute(ctx).met(), is(true)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java index 
7483d5a90c3..4f3fc2a31a8 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java @@ -94,7 +94,7 @@ public class ScriptConditionTests extends ESTestCase { public void testExecute() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("ctx.payload.hits.total > 1"), scriptService); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); assertFalse(condition.execute(ctx).met()); } @@ -102,7 +102,7 @@ public class ScriptConditionTests extends ESTestCase { public void testExecuteMergedParams() throws Exception { Script script = new Script(ScriptType.INLINE, "mockscript", "ctx.payload.hits.total > threshold", singletonMap("threshold", 1)); ScriptCondition executable = new ScriptCondition(script, scriptService); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); assertFalse(executable.execute(ctx).met()); } @@ -115,7 +115,7 @@ public class ScriptConditionTests extends ESTestCase { parser.nextToken(); ScriptCondition executable = ScriptCondition.parse(scriptService, "_watch", parser); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); assertFalse(executable.execute(ctx).met()); @@ -179,7 +179,7 @@ public class ScriptConditionTests extends ESTestCase { public void testScriptConditionThrowException() throws Exception { ScriptCondition condition = new ScriptCondition( mockScript("null.foo"), scriptService); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); ScriptException exception = expectThrows(ScriptException.class, () -> condition.execute(ctx)); assertThat(exception.getMessage(), containsString("Error evaluating null.foo")); @@ -187,7 +187,7 @@ public class ScriptConditionTests extends ESTestCase { public void testScriptConditionReturnObjectThrowsException() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("return new Object()"), scriptService); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); Exception exception = expectThrows(IllegalStateException.class, () -> condition.execute(ctx)); 
assertThat(exception.getMessage(), @@ -197,7 +197,7 @@ public class ScriptConditionTests extends ESTestCase { public void testScriptConditionAccessCtx() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("ctx.trigger.scheduled_time.getMillis() < new Date().time"), scriptService); - SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 500L, new ShardSearchFailure[0]); + SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, new ShardSearchFailure[0]); WatchExecutionContext ctx = mockExecutionContext("_name", new DateTime(DateTimeZone.UTC), new Payload.XContent(response)); Thread.sleep(10); assertThat(condition.execute(ctx).met(), is(true)); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index cfb470518dd..752b4b8bccd 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -125,15 +125,18 @@ public class TriggeredWatchStoreTests extends ESTestCase { final Index index = metaDataBuilder.get(TriggeredWatchStore.INDEX_NAME).getIndex(); IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(index); for (int i = 0; i < numShards; i++) { - ShardRoutingState state; + final ShardRoutingState state; + final String currentNodeId; if (numStartedShards-- > 0) { state = ShardRoutingState.STARTED; + currentNodeId = "_node_id"; } else { state = ShardRoutingState.UNASSIGNED; + currentNodeId = null; } ShardId shardId = new ShardId(index, 0); indexRoutingTableBuilder.addIndexShard(new IndexShardRoutingTable.Builder(shardId) - .addShard(TestShardRouting.newShardRouting(shardId, "_node_id", null, true, state, + .addShard(TestShardRouting.newShardRouting(shardId, currentNodeId, null, true, state, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, ""))) .build()); indexRoutingTableBuilder.addReplica(); @@ -180,7 +183,7 @@ public class TriggeredWatchStoreTests extends ESTestCase { BytesArray source = new BytesArray("{}"); SearchHit hit = new SearchHit(0, "first_foo", new Text(TriggeredWatchStore.DOC_TYPE), null); hit.version(1L); - hit.shard(new SearchShardTarget("_node_id", index, 0)); + hit.shard(new SearchShardTarget("_node_id", index, 0, null)); hit.sourceRef(source); SearchHits hits = new SearchHits(new SearchHit[]{hit}, 1, 1.0f); when(searchResponse1.getHits()).thenReturn(hits); @@ -192,12 +195,12 @@ public class TriggeredWatchStoreTests extends ESTestCase { // First return a scroll response with a single hit and then with no hits hit = new SearchHit(0, "second_foo", new Text(TriggeredWatchStore.DOC_TYPE), null); hit.version(1L); - hit.shard(new SearchShardTarget("_node_id", index, 0)); + hit.shard(new SearchShardTarget("_node_id", index, 0, null)); hit.sourceRef(source); hits = new SearchHits(new SearchHit[]{hit}, 1, 1.0f); SearchResponse searchResponse2 = new SearchResponse( - new InternalSearchResponse(hits, null, null, null, false, null, 1), "_scrollId1", 1, 1, 1, null); - SearchResponse searchResponse3 = new SearchResponse(InternalSearchResponse.empty(), "_scrollId2", 1, 1, 1, null); + new InternalSearchResponse(hits, null, null, null, false, null, 1), "_scrollId1", 1, 1, 0, 1, null); + SearchResponse searchResponse3 = new SearchResponse(InternalSearchResponse.empty(), 
"_scrollId2", 1, 1, 0, 1, null); doAnswer(invocation -> { SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[0]; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java index 63b86f57ce3..a9ae8359bec 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherUtilsTests.java @@ -96,6 +96,7 @@ public class WatcherUtilsTests extends ESTestCase { BytesReference expectedSource = null; Script expectedTemplate = null; WatcherSearchTemplateRequest request; + boolean stored = false; if (randomBoolean()) { Map params = new HashMap<>(); if (randomBoolean()) { @@ -106,7 +107,8 @@ public class WatcherUtilsTests extends ESTestCase { } String text = randomAlphaOfLengthBetween(1, 5); ScriptType scriptType = randomFrom(ScriptType.values()); - expectedTemplate = new Script(scriptType, "mustache", text, params); + stored = scriptType == ScriptType.STORED; + expectedTemplate = new Script(scriptType, stored ? null : "mustache", text, params); request = new WatcherSearchTemplateRequest(expectedIndices, expectedTypes, expectedSearchType, expectedIndicesOptions, expectedTemplate); } else { @@ -130,7 +132,7 @@ public class WatcherUtilsTests extends ESTestCase { assertThat(result.getSearchType(), equalTo(expectedSearchType)); assertNotNull(result.getTemplate()); - assertThat(result.getTemplate().getLang(), equalTo("mustache")); + assertThat(result.getTemplate().getLang(), equalTo(stored ? null : "mustache")); if (expectedSource == null) { assertThat(result.getTemplate().getIdOrCode(), equalTo(expectedTemplate.getIdOrCode())); assertThat(result.getTemplate().getType(), equalTo(expectedTemplate.getType())); @@ -194,6 +196,7 @@ public class WatcherUtilsTests extends ESTestCase { builder.rawField("body", source); } Script template = null; + boolean stored = false; if (randomBoolean()) { Map params = new HashMap<>(); if (randomBoolean()) { @@ -204,7 +207,8 @@ public class WatcherUtilsTests extends ESTestCase { } String text = randomAlphaOfLengthBetween(1, 5); ScriptType scriptType = randomFrom(ScriptType.values()); - template = new Script(scriptType, "mustache", text, params); + stored = scriptType == ScriptType.STORED; + template = new Script(scriptType, stored ? null : "mustache", text, params); builder.field("template", template); } builder.endObject(); @@ -228,7 +232,7 @@ public class WatcherUtilsTests extends ESTestCase { assertThat(result.getTemplate().getIdOrCode(), equalTo(template.getIdOrCode())); assertThat(result.getTemplate().getType(), equalTo(template.getType())); assertThat(result.getTemplate().getParams(), equalTo(template.getParams())); - assertThat(result.getTemplate().getLang(), equalTo("mustache")); + assertThat(result.getTemplate().getLang(), equalTo(stored ? 
null : "mustache")); } } diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java index 6001734e368..b6111cb97e2 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java @@ -20,7 +20,7 @@ public class WatcherSearchTemplateRequestTests extends ESTestCase { public void testFromXContentWithTemplateDefaultLang() throws IOException { String source = "{\"template\":{\"id\":\"default-script\", \"params\":{\"foo\":\"bar\"}}}"; - assertTemplate(source, "default-script", "mustache", singletonMap("foo", "bar")); + assertTemplate(source, "default-script", null, singletonMap("foo", "bar")); } public void testFromXContentWithTemplateCustomLang() throws IOException { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index eb0b18f134a..b7b75e015cc 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -115,7 +115,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNot.not; -@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, maxNumDataNodes = 3) +@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, maxNumDataNodes = 3) public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase { public static final String WATCHER_LANG = Script.DEFAULT_SCRIPT_LANG; diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index 2a5b5010892..b9f13ff1c8e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -235,7 +235,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase { .setContent(jsonBuilder().startObject().field("template").value(searchSourceBuilder).endObject().bytes(), XContentType.JSON) .get()); - Script template = new Script(ScriptType.STORED, "mustache", "my-template", Collections.emptyMap()); + Script template = new Script(ScriptType.STORED, null, "my-template", Collections.emptyMap()); WatcherSearchTemplateRequest searchRequest = new WatcherSearchTemplateRequest(new String[]{"events"}, new String[0], SearchType.DEFAULT, WatcherSearchTemplateRequest.DEFAULT_INDICES_OPTIONS, template); testConditionSearch(searchRequest); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index 43d051b9be9..d69bc42974e 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -94,7 +94,8 @@ 
public class SearchInputTests extends ESTestCase { public void testExecute() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 1234, ShardSearchFailure.EMPTY_ARRAY); + SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 0, 1234, + ShardSearchFailure.EMPTY_ARRAY); searchFuture.onResponse(searchResponse); when(client.search(requestCaptor.capture())).thenReturn(searchFuture); @@ -133,7 +134,8 @@ public class SearchInputTests extends ESTestCase { public void testDifferentSearchType() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 1234, ShardSearchFailure.EMPTY_ARRAY); + SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 0, 1234, + ShardSearchFailure.EMPTY_ARRAY); searchFuture.onResponse(searchResponse); when(client.search(requestCaptor.capture())).thenReturn(searchFuture); @@ -174,7 +176,8 @@ public class SearchInputTests extends ESTestCase { public void testThatEmptyRequestBodyWorks() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 1234, ShardSearchFailure.EMPTY_ARRAY); + SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 0, 1234, + ShardSearchFailure.EMPTY_ARRAY); searchFuture.onResponse(searchResponse); when(client.search(requestCaptor.capture())).thenReturn(searchFuture); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java index 70b67cbc8bf..fa43a0e9f01 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchTransformTests.java @@ -74,7 +74,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.joda.time.DateTimeZone.UTC; -@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false, supportsDedicatedMasters = false, +@ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, supportsDedicatedMasters = false, numDataNodes = 1) public class SearchTransformTests extends ESIntegTestCase { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 1d4d9968c09..b1282264175 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -123,7 +123,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas .setLang("mockscript") .setContent(new BytesArray("{\"script\" : \"['key3' : ctx.payload.key1 + ctx.payload.key2]\"}"), XContentType.JSON) 
.get()); - script = new Script(ScriptType.STORED, "mockscript", "my-script", Collections.emptyMap()); + script = new Script(ScriptType.STORED, null, "my-script", Collections.emptyMap()); } // put a watch that has watch level transform: diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java index 7229f7460f0..767f89aecdc 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transform/script/ScriptTransformTests.java @@ -58,7 +58,7 @@ public class ScriptTransformTests extends ESTestCase { ScriptService service = mock(ScriptService.class); ScriptType type = randomFrom(ScriptType.values()); Map params = Collections.emptyMap(); - Script script = new Script(type, "_lang", "_script", params); + Script script = new Script(type, type == ScriptType.STORED ? null : "_lang", "_script", params); ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); when(service.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)).thenReturn(factory); ExecutableScriptTransform transform = new ExecutableScriptTransform(new ScriptTransform(script), logger, service); @@ -86,7 +86,7 @@ public class ScriptTransformTests extends ESTestCase { ScriptService service = mock(ScriptService.class); ScriptType type = randomFrom(ScriptType.values()); Map params = Collections.emptyMap(); - Script script = new Script(type, "_lang", "_script", params); + Script script = new Script(type, type == ScriptType.STORED ? null : "_lang", "_script", params); ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); when(service.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)).thenReturn(factory); ExecutableScriptTransform transform = new ExecutableScriptTransform(new ScriptTransform(script), logger, service); @@ -112,7 +112,7 @@ public class ScriptTransformTests extends ESTestCase { ScriptService service = mock(ScriptService.class); ScriptType type = randomFrom(ScriptType.values()); Map params = Collections.emptyMap(); - Script script = new Script(type, "_lang", "_script", params); + Script script = new Script(type, type == ScriptType.STORED ? 
null : "_lang", "_script", params); ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class); when(service.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)).thenReturn(factory); ExecutableScriptTransform transform = new ExecutableScriptTransform(new ScriptTransform(script), logger, service); @@ -186,10 +186,9 @@ public class ScriptTransformTests extends ESTestCase { public void testScriptConditionParserBadLang() throws Exception { ScriptTransformFactory transformFactory = new ScriptTransformFactory(Settings.builder().build(), createScriptService(tp)); - ScriptType scriptType = randomFrom(ScriptType.values()); String script = "return true"; XContentBuilder builder = jsonBuilder().startObject() - .field(scriptTypeField(scriptType), script) + .field(scriptTypeField(ScriptType.INLINE), script) .field("lang", "not_a_valid_lang") .startObject("params").field("key", "value").endObject() .endObject(); @@ -199,13 +198,7 @@ public class ScriptTransformTests extends ESTestCase { parser.nextToken(); ScriptTransform scriptCondition = transformFactory.parseTransform("_watch", parser); Exception e = expectThrows(IllegalArgumentException.class, () -> transformFactory.createExecutable(scriptCondition)); - if (scriptType == ScriptType.STORED) { - assertThat(e.getMessage(), containsString("unable to get stored script with unsupported lang [not_a_valid_lang]")); - assertWarnings("specifying the field [lang] for executing stored scripts is deprecated;" + - " use only the field [id] to specify an "); - } else { - assertThat(e.getMessage(), containsString("script_lang not supported [not_a_valid_lang]")); - } + assertThat(e.getMessage(), containsString("script_lang not supported [not_a_valid_lang]")); } static String scriptTypeField(ScriptType type) { diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatchStatsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatchStatsTests.java index 5fdb72579e6..73445d26ca3 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatchStatsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatchStatsTests.java @@ -54,7 +54,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.core.Is.is; @ESIntegTestCase.ClusterScope(scope = SUITE, numClientNodes = 0, transportClientRatio = 0, - randomDynamicTemplates = false, numDataNodes = 1, supportsDedicatedMasters = false) + numDataNodes = 1, supportsDedicatedMasters = false) public class WatchStatsTests extends AbstractWatcherIntegrationTestCase { private static CountDownLatch scriptStartedLatch = new CountDownLatch(1); diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatcherStatsTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatcherStatsTests.java index 42d01e25807..d8bf07953bf 100644 --- a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatcherStatsTests.java +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/action/stats/WatcherStatsTests.java @@ -25,7 +25,7 @@ import static org.elasticsearch.xpack.watcher.trigger.schedule.Schedules.cron; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -@ClusterScope(scope = TEST, numClientNodes = 0, transportClientRatio = 0, randomDynamicTemplates = false) +@ClusterScope(scope = TEST, numClientNodes = 0, transportClientRatio = 0) public 
class WatcherStatsTests extends AbstractWatcherIntegrationTestCase { @Override diff --git a/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportActionTests.java b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportActionTests.java new file mode 100644 index 00000000000..539db75fdbe --- /dev/null +++ b/plugin/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportActionTests.java @@ -0,0 +1,172 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.watcher.transport.actions; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.watcher.watch.Watch; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import java.util.Collections; +import java.util.HashSet; + +import static java.util.Arrays.asList; +import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class WatcherTransportActionTests extends ESTestCase { + + private MyTransportAction transportAction; + private ClusterService clusterService; + private TransportService transportService; + + @Before + public void createTransportAction() { + ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.executor(any())).thenReturn(EsExecutors.newDirectExecutorService()); + clusterService = mock(ClusterService.class); + transportService = mock(TransportService.class); + transportAction = new MyTransportAction(transportService, threadPool, clusterService); + } + + public void testThatRequestIsExecutedLocallyWithDistributedExecutionEnabled() throws Exception { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + .masterNodeId("master_node").localNodeId("data_node") + 
.add(newNode("master_node", Version.CURRENT)) + .add(newNode("data_node", Version.CURRENT)) + .build(); + + Index watchIndex = new Index(Watch.INDEX, "foo"); + ShardId shardId = new ShardId(watchIndex, 0); + IndexRoutingTable routingTable = IndexRoutingTable.builder(watchIndex) + .addShard(TestShardRouting.newShardRouting(shardId, "data_node", true, STARTED)).build(); + + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")) + .nodes(nodes) + .routingTable(RoutingTable.builder().add(routingTable).build()) + .build(); + + when(clusterService.state()).thenReturn(state); + when(clusterService.localNode()).thenReturn(state.nodes().getLocalNode()); + MyActionRequest request = new MyActionRequest(); + PlainActionFuture future = PlainActionFuture.newFuture(); + + Task task = request.createTask(1, "type", "action", new TaskId("parent", 0)); + transportAction.doExecute(task, request, future); + MyActionResponse response = future.actionGet(1000); + assertThat(response.request, is(request)); + } + + public void testThatRequestIsExecutedByMasterWithDistributedExecutionDisabled() throws Exception { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + .masterNodeId("master_node").localNodeId("master_node") + .add(newNode("master_node", VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_0_0_alpha2))) + .build(); + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")).nodes(nodes).build(); + + when(clusterService.state()).thenReturn(state); + when(clusterService.localNode()).thenReturn(state.nodes().getLocalNode()); + MyActionRequest request = new MyActionRequest(); + PlainActionFuture future = PlainActionFuture.newFuture(); + + Task task = request.createTask(1, "type", "action", new TaskId("parent", 0)); + transportAction.doExecute(task, request, future); + MyActionResponse response = future.actionGet(1000); + assertThat(response.request, is(request)); + } + + public void testThatRequestIsForwardedToMasterWithDistributedExecutionDisabled() throws Exception { + DiscoveryNodes nodes = new DiscoveryNodes.Builder() + .masterNodeId("master_node").localNodeId("non_master_node") + .add(newNode("master_node", VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_0_0_alpha2))) + .add(newNode("non_master_node", Version.CURRENT)) + .build(); + + ClusterState state = ClusterState.builder(new ClusterName("my-cluster")).nodes(nodes).build(); + + when(clusterService.state()).thenReturn(state); + when(clusterService.localNode()).thenReturn(state.nodes().getLocalNode()); + MyActionRequest request = new MyActionRequest(); + Task task = request.createTask(1, "type", "action", new TaskId("parent", 0)); + transportAction.doExecute(task, request, PlainActionFuture.newFuture()); + // dont wait for the future here, we would need to stub the action listener of the sendRequest call + + ArgumentCaptor nodeArgumentCaptor = ArgumentCaptor.forClass(DiscoveryNode.class); + verify(transportService).sendRequest(nodeArgumentCaptor.capture(), eq("my_action_name"), eq(request), any()); + assertThat(nodeArgumentCaptor.getValue().getId(), is("master_node")); + } + + private static DiscoveryNode newNode(String nodeName, Version version) { + return new DiscoveryNode(nodeName, ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(), + new HashSet<>(asList(DiscoveryNode.Role.values())), version); + } + + private final class MyTransportAction extends WatcherTransportAction { + + MyTransportAction(TransportService transportService, ThreadPool threadPool, 
ClusterService clusterService) { + super(Settings.EMPTY, "my_action_name", transportService, threadPool, new ActionFilters(Collections.emptySet()), + new IndexNameExpressionResolver(Settings.EMPTY), new XPackLicenseState(), clusterService, MyActionRequest::new, + MyActionResponse::new); + } + + @Override + protected void masterOperation(MyActionRequest request, ClusterState state, + ActionListener listener) throws Exception { + listener.onResponse(new MyActionResponse(request)); + } + } + + private static final class MyActionResponse extends ActionResponse { + + MyActionRequest request; + + MyActionResponse(MyActionRequest request) { + super(); + this.request = request; + } + + MyActionResponse() {} + } + + private static final class MyActionRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + } +} \ No newline at end of file diff --git a/plugin/src/test/resources/org/elasticsearch/transport/actions b/plugin/src/test/resources/org/elasticsearch/transport/actions index a249e61e1c4..e0e6c4881fc 100644 --- a/plugin/src/test/resources/org/elasticsearch/transport/actions +++ b/plugin/src/test/resources/org/elasticsearch/transport/actions @@ -58,7 +58,6 @@ indices:monitor/shard_stores indices:monitor/stats indices:monitor/upgrade indices:data/read/explain -indices:data/read/field_stats indices:data/read/field_caps indices:data/read/get indices:data/read/xpack/graph/explore diff --git a/plugin/src/test/resources/org/elasticsearch/transport/handlers b/plugin/src/test/resources/org/elasticsearch/transport/handlers index 916a48af102..3e8d1d37502 100644 --- a/plugin/src/test/resources/org/elasticsearch/transport/handlers +++ b/plugin/src/test/resources/org/elasticsearch/transport/handlers @@ -47,7 +47,6 @@ indices:admin/upgrade indices:admin/upgrade[n] indices:admin/validate/query[s] indices:data/read/explain[s] -indices:data/read/field_stats[s] indices:data/read/field_caps[index] indices:data/read/field_caps[index][s] indices:data/read/get[s] @@ -64,6 +63,7 @@ indices:data/read/search[phase/query+fetch/scroll] indices:data/read/search[phase/query/id] indices:data/read/search[phase/query/scroll] indices:data/read/search[phase/query] +indices:data/read/search[can_match] internal:transport/proxy/indices:data/read/search[clear_scroll_contexts] internal:transport/proxy/indices:data/read/search[free_context/scroll] internal:transport/proxy/indices:data/read/search[free_context] @@ -74,6 +74,7 @@ internal:transport/proxy/indices:data/read/search[phase/query+fetch/scroll] internal:transport/proxy/indices:data/read/search[phase/query/id] internal:transport/proxy/indices:data/read/search[phase/query/scroll] internal:transport/proxy/indices:data/read/search[phase/query] +internal:transport/proxy/indices:data/read/search[can_match] indices:data/read/tv[s] indices:data/write/bulk[s] indices:data/write/bulk[s][p] diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json index 89cc5b2d497..f1225b565e2 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.get_assistance.json @@ -27,10 +27,6 @@ "ignore_unavailable": { "type" : "boolean", "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" - }, - "kibana_indices": { - "type": "list", - "description": "A comma separated 
list of indices that should be treated as kibana indices" } } } diff --git a/plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json index a13977ab40f..60b1668595d 100644 --- a/plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json +++ b/plugin/src/test/resources/rest-api-spec/api/xpack.migration.upgrade.json @@ -12,12 +12,6 @@ "required" : true, "description" : "The name of the index" } - }, - "params": { - "kibana_indices": { - "type": "list", - "description": "A comma separated list of indices that should be treated as kibana indices" - } } } } diff --git a/plugin/src/test/resources/rest-api-spec/test/deprecation/10_basic.yml b/plugin/src/test/resources/rest-api-spec/test/deprecation/10_basic.yml index 85643d0afa2..705ca9f9692 100644 --- a/plugin/src/test/resources/rest-api-spec/test/deprecation/10_basic.yml +++ b/plugin/src/test/resources/rest-api-spec/test/deprecation/10_basic.yml @@ -11,5 +11,5 @@ setup: index: "*" - length: { cluster_settings: 0 } - length: { node_settings: 0 } - - length: { index_settings: 1 } + - length: { index_settings: 0 } diff --git a/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml b/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml index 33719e676ca..73fd76ad8bb 100644 --- a/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml +++ b/plugin/src/test/resources/rest-api-spec/test/upgrade/10_basic.yml @@ -21,16 +21,6 @@ setup: indices.create: index: test1 - - do: - indices.create: - index: test2 - - - do: - index: - index: test2 - type: my_type - id: 1 - body: { "field1": "foo", "field2": "bar" } - do: indices.refresh: {} @@ -42,36 +32,8 @@ setup: - length: { indices: 0 } --- -"Upgrade info - all, but treat test2 as kibana": - - do: - xpack.migration.get_assistance: { index: _all, kibana_indices: test2 } - - - length: { indices: 1 } - - match: { indices.test2.action_required: "upgrade" } - ---- -"Upgrade test2 as kibana index": +"Upgrade test - should fail as index is already up to date": - do: - xpack.migration.upgrade: { index: test2, kibana_indices: test2 } - - - match: { total: 1 } - - match: { created: 1 } - - length: { failures: 0 } - - - do: - search: - index: test2_v6 - body: { "query" : { "match_all" : {} } } - - match: { hits.total: 1} - - match: { hits.hits.0._type: "doc"} - - match: { hits.hits.0._id: "my_type-1"} - - match: { hits.hits.0._source.my_type.field1: "foo"} - - match: { hits.hits.0._source.my_type.field2: "bar"} - - - do: - indices.get_alias: - index: test2_v6 - - - match: {test2_v6.aliases.test2: {}} - + catch: /illegal_state_exception/ + xpack.migration.upgrade: { index: test1 } diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index ff435e56fca..fec9b286fd2 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -28,7 +28,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> try { httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=${node.config.numNodes}&wait_for_status=yellow").openConnection(); httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8))); + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); httpURLConnection.setRequestMethod("GET"); httpURLConnection.setConnectTimeout(1000); 
httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes! @@ -116,12 +116,11 @@ subprojects { numBwcNodes = 2 numNodes = 2 clusterName = 'full-cluster-restart' + setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' waitCondition = waitWithAuth setting 'xpack.security.transport.ssl.enabled', 'true' setting 'xpack.ssl.keystore.path', 'testnode.jks' setting 'xpack.ssl.keystore.password', 'testnode' - setting 'xpack.security.authc.realms.native.type', 'native' - setting 'xpack.security.authc.realms.native.order', '0' dependsOn copyTestNodeKeystore extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') if (withSystemKey) { @@ -152,11 +151,10 @@ subprojects { numNodes = 2 clusterName = 'full-cluster-restart' dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir } + setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' waitCondition = waitWithAuth setting 'xpack.ssl.keystore.path', 'testnode.jks' keystoreSetting 'xpack.ssl.keystore.secure_password', 'testnode' - setting 'xpack.security.authc.realms.native.type', 'native' - setting 'xpack.security.authc.realms.native.order', '0' dependsOn copyTestNodeKeystore extraConfigFile 'testnode.jks', new File(outputDir + '/testnode.jks') if (withSystemKey) { diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 81a1b1aff0e..5413c2dcd82 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -15,22 +15,48 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.NotEqualMessageBuilder; +import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.ml.MachineLearningTemplateRegistry; +import org.elasticsearch.xpack.common.text.TextTemplate; import org.elasticsearch.xpack.security.SecurityClusterClientYamlTestCase; import org.elasticsearch.xpack.test.rest.XPackRestTestCase; +import org.elasticsearch.xpack.watcher.actions.logging.LoggingAction; +import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder; +import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; +import org.elasticsearch.xpack.watcher.support.xcontent.ObjectPath; +import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; +import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Base64; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; +import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.hamcrest.Matchers.containsString; 
+import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.everyItem; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; public class FullClusterRestartIT extends ESRestTestCase { private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); @@ -58,7 +84,7 @@ public class FullClusterRestartIT extends ESRestTestCase { @Override protected Settings restClientSettings() { - String token = "Basic " + Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8)); + String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) // we increase the timeout here to 90 seconds to handle long waits for a green @@ -116,6 +142,190 @@ public class FullClusterRestartIT extends ESRestTestCase { } } + public void testWatcher() throws Exception { + if (runningAgainstOldCluster) { + logger.info("Adding a watch on old cluster"); + client().performRequest("PUT", "_xpack/watcher/watch/bwc_watch", emptyMap(), + new StringEntity(loadWatch("simple-watch.json"), ContentType.APPLICATION_JSON)); + + logger.info("Adding a watch with \"fun\" throttle periods on old cluster"); + client().performRequest("PUT", "_xpack/watcher/watch/bwc_throttle_period", emptyMap(), + new StringEntity(loadWatch("throttle-period-watch.json"), ContentType.APPLICATION_JSON)); + + logger.info("Adding a watch with \"fun\" read timeout on old cluster"); + client().performRequest("PUT", "_xpack/watcher/watch/bwc_funny_timeout", emptyMap(), + new StringEntity(loadWatch("funny-timeout-watch.json"), ContentType.APPLICATION_JSON)); + + logger.info("Waiting for watch results index to fill up..."); + waitForYellow(".watches,bwc_watch_index,.watcher-history*"); + waitForHits("bwc_watch_index", 2); + waitForHits(".watcher-history*", 2); + logger.info("Done creating watcher-related indices"); + } else { + logger.info("testing against {}", oldClusterVersion); + waitForYellow(".watches,bwc_watch_index,.watcher-history*"); + + logger.info("checking that upgrade procedure on the new cluster is required"); + Map response = toMap(client().performRequest("GET", "/_xpack/migration/assistance")); + logger.info(response); + + @SuppressWarnings("unchecked") Map indices = (Map) response.get("indices"); + assertThat(indices.entrySet(), hasSize(1)); + assertThat(indices.get(".watches"), notNullValue()); + @SuppressWarnings("unchecked") Map index = (Map) indices.get(".watches"); + assertThat(index.get("action_required"), equalTo("upgrade")); + + logger.info("starting upgrade procedure on the new cluster"); + + Map params = Collections.singletonMap("error_trace", "true"); + Map upgradeResponse = toMap(client().performRequest("POST", "_xpack/migration/upgrade/.watches", params)); + assertThat(upgradeResponse.get("timed_out"), equalTo(Boolean.FALSE)); + // we posted 3 watches, but monitoring can post a few more + assertThat((int)upgradeResponse.get("total"), greaterThanOrEqualTo(3)); + + logger.info("checking that 
upgrade procedure on the new cluster is required again"); + Map responseAfter = toMap(client().performRequest("GET", "/_xpack/migration/assistance")); + @SuppressWarnings("unchecked") Map indicesAfter = (Map) responseAfter.get("indices"); + assertThat(indicesAfter.entrySet(), empty()); + + // Wait for watcher to actually start.... + Map startWatchResponse = toMap(client().performRequest("POST", "_xpack/watcher/_start")); + assertThat(startWatchResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertBusy(() -> { + Map statsWatchResponse = toMap(client().performRequest("GET", "_xpack/watcher/stats")); + @SuppressWarnings("unchecked") + List states = ((List) statsWatchResponse.get("stats")) + .stream().map(o -> ((Map) o).get("watcher_state")).collect(Collectors.toList()); + assertThat(states, everyItem(is("started"))); + }); + + try { + assertOldTemplatesAreDeleted(); + assertWatchIndexContentsWork(); + assertBasicWatchInteractions(); + } finally { + /* Shut down watcher after every test because watcher can be a bit finicky about shutting down when the node shuts + * down. This makes super sure it shuts down *and* causes the test to fail in a sensible spot if it doesn't shut down. + */ + Map stopWatchResponse = toMap(client().performRequest("POST", "_xpack/watcher/_stop")); + assertThat(stopWatchResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertBusy(() -> { + Map statsStoppedWatchResponse = toMap(client().performRequest("GET", "_xpack/watcher/stats")); + @SuppressWarnings("unchecked") + List states = ((List) statsStoppedWatchResponse.get("stats")) + .stream().map(o -> ((Map) o).get("watcher_state")).collect(Collectors.toList()); + assertThat(states, everyItem(is("stopped"))); + }); + } + } + } + + private String loadWatch(String watch) throws IOException { + return StreamsUtils.copyToStringFromClasspath("/org/elasticsearch/xpack/restart/" + watch); + } + + @SuppressWarnings("unchecked") + private void assertOldTemplatesAreDeleted() throws IOException { + Map templates = toMap(client().performRequest("GET", "/_template")); + assertThat(templates.keySet(), not(hasItems(is("watches"), startsWith("watch-history"), is("triggered_watches")))); + } + + @SuppressWarnings("unchecked") + private void assertWatchIndexContentsWork() throws Exception { + // Fetch a basic watch + Map bwcWatch = toMap(client().performRequest("GET", "_xpack/watcher/watch/bwc_watch")); + + logger.error("-----> {}", bwcWatch); + + assertThat(bwcWatch.get("found"), equalTo(true)); + Map source = (Map) bwcWatch.get("watch"); + assertEquals(1000, source.get("throttle_period_in_millis")); + int timeout = (int) timeValueSeconds(100).millis(); + assertThat(ObjectPath.eval("input.search.timeout_in_millis", source), equalTo(timeout)); + assertThat(ObjectPath.eval("actions.index_payload.transform.search.timeout_in_millis", source), equalTo(timeout)); + assertThat(ObjectPath.eval("actions.index_payload.index.index", source), equalTo("bwc_watch_index")); + assertThat(ObjectPath.eval("actions.index_payload.index.doc_type", source), equalTo("bwc_watch_type")); + assertThat(ObjectPath.eval("actions.index_payload.index.timeout_in_millis", source), equalTo(timeout)); + + // Fetch a watch with "fun" throttle periods + bwcWatch = toMap(client().performRequest("GET", "_xpack/watcher/watch/bwc_throttle_period")); + assertThat(bwcWatch.get("found"), equalTo(true)); + source = (Map) bwcWatch.get("watch"); + assertEquals(timeout, source.get("throttle_period_in_millis")); + 
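Aside, not part of the patch: the assertions in this test lean on ObjectPath.eval to pull values out of the watch source returned by the GET watch API. Conceptually that helper walks a dotted path through nested maps; a rough, self-contained sketch of the idea (list indices and escaping are ignored for brevity, and the class name is illustrative):

```java
import java.util.HashMap;
import java.util.Map;

// Approximation of what calls like ObjectPath.eval("input.search.timeout_in_millis", source)
// do in the assertions above: follow each dotted segment into the next nested Map.
public final class DottedPath {

    @SuppressWarnings("unchecked")
    static Object eval(String path, Object source) {
        Object current = source;
        for (String key : path.split("\\.")) {
            if (current instanceof Map == false) {
                return null;
            }
            current = ((Map<String, Object>) current).get(key);
        }
        return current;
    }

    public static void main(String[] args) {
        Map<String, Object> search = new HashMap<>();
        search.put("timeout_in_millis", 100_000);
        Map<String, Object> input = new HashMap<>();
        input.put("search", search);
        Map<String, Object> source = new HashMap<>();
        source.put("input", input);
        System.out.println(eval("input.search.timeout_in_millis", source)); // 100000
    }
}
```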
assertThat(ObjectPath.eval("actions.index_payload.throttle_period_in_millis", source), equalTo(timeout)); + + /* + * Fetch a watch with a funny timeout to verify loading fractional time + * values. + */ + bwcWatch = toMap(client().performRequest("GET", "_xpack/watcher/watch/bwc_funny_timeout")); + assertThat(bwcWatch.get("found"), equalTo(true)); + source = (Map) bwcWatch.get("watch"); + + + Map attachments = ObjectPath.eval("actions.work.email.attachments", source); + Map attachment = (Map) attachments.get("test_report.pdf"); + Map request = ObjectPath.eval("http.request", attachment); + assertEquals(timeout, request.get("read_timeout_millis")); + assertEquals("https", request.get("scheme")); + assertEquals("example.com", request.get("host")); + assertEquals("{{ctx.metadata.report_url}}", request.get("path")); + assertEquals(8443, request.get("port")); + Map basic = ObjectPath.eval("auth.basic", request); + assertThat(basic, hasEntry("username", "Aladdin")); + // password doesn't come back because it is hidden + assertThat(basic, not(hasKey("password"))); + + Map history = toMap(client().performRequest("GET", ".watcher-history*/_search")); + Map hits = (Map) history.get("hits"); + assertThat((int) (hits.get("total")), greaterThanOrEqualTo(2)); + } + + private void assertBasicWatchInteractions() throws Exception { + + String watch = new WatchSourceBuilder() + .condition(AlwaysCondition.INSTANCE) + .trigger(ScheduleTrigger.builder(new IntervalSchedule(IntervalSchedule.Interval.seconds(1)))) + .addAction("awesome", LoggingAction.builder(new TextTemplate("test"))).buildAsBytes(XContentType.JSON).utf8ToString(); + Map put = toMap(client().performRequest("PUT", "_xpack/watcher/watch/new_watch", emptyMap(), + new StringEntity(watch, ContentType.APPLICATION_JSON))); + + logger.info(put); + + assertThat(put.get("created"), equalTo(true)); + assertThat(put.get("_version"), equalTo(1)); + + put = toMap(client().performRequest("PUT", "_xpack/watcher/watch/new_watch", emptyMap(), + new StringEntity(watch, ContentType.APPLICATION_JSON))); + assertThat(put.get("created"), equalTo(false)); + assertThat(put.get("_version"), equalTo(2)); + + Map get = toMap(client().performRequest("GET", "_xpack/watcher/watch/new_watch")); + assertThat(get.get("found"), equalTo(true)); + @SuppressWarnings("unchecked") Map source = (Map) get.get("watch"); + Map logging = ObjectPath.eval("actions.awesome.logging", source); + assertEquals("info", logging.get("level")); + assertEquals("test", logging.get("text")); + } + + private void waitForYellow(String indexName) throws IOException { + Map params = new HashMap<>(); + params.put("wait_for_status", "yellow"); + params.put("timeout", "30s"); + Map response = toMap(client().performRequest("GET", "/_cluster/health/" + indexName, params)); + assertThat(response.get("timed_out"), equalTo(Boolean.FALSE)); + } + + @SuppressWarnings("unchecked") + private void waitForHits(String indexName, int expectedHits) throws Exception { + assertBusy(() -> { + Map response = toMap(client().performRequest("GET", "/" + indexName + "/_search", singletonMap("size", "0"))); + Map hits = (Map) response.get("hits"); + int total = (int) hits.get("total"); + assertThat(total, greaterThanOrEqualTo(expectedHits)); + }, 30, TimeUnit.SECONDS); + } + static Map toMap(Response response) throws IOException { return toMap(EntityUtils.toString(response.getEntity())); } diff --git a/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/funny-timeout-watch.json 
b/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/funny-timeout-watch.json new file mode 100644 index 00000000000..b5f1902afc6 --- /dev/null +++ b/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/funny-timeout-watch.json @@ -0,0 +1,38 @@ +{ + "trigger" : { + "schedule": { + "interval": "100s" + } + }, + "condition": { + "never": {} + }, + "actions": { + "work": { + "email": { + "to": "email@domain.com", + "subject": "Test Kibana PDF report", + "attachments": { + "test_report.pdf": { + "http": { + "content_type": "application/pdf", + "request": { + "read_timeout": "100s", + "scheme": "https", + "host": "example.com", + "path":"{{ctx.metadata.report_url}}", + "port": 8443, + "auth": { + "basic": { + "username": "Aladdin", + "password": "open sesame" + } + } + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/simple-watch.json b/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/simple-watch.json new file mode 100644 index 00000000000..ceeb128dac8 --- /dev/null +++ b/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/simple-watch.json @@ -0,0 +1,40 @@ +{ + "trigger" : { + "schedule": { + "interval": "1s" + } + }, + "input" : { + "search" : { + "timeout": "100s", + "request" : { + "indices" : [ ".watches" ], + "body" : { + "query" : { "match_all" : {}}, + "size": 1 + } + } + } + }, + "condition" : { + "always" : {} + }, + "throttle_period": "1s", + "actions" : { + "index_payload" : { + "transform" : { + "search" : { + "request" : { + "body" : { "size": 1, "query" : { "match_all" : {} }} + }, + "timeout": "100s" + } + }, + "index" : { + "index" : "bwc_watch_index", + "doc_type" : "bwc_watch_type", + "timeout": "100s" + } + } + } +} \ No newline at end of file diff --git a/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/throttle-period-watch.json b/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/throttle-period-watch.json new file mode 100644 index 00000000000..736cf79ad73 --- /dev/null +++ b/qa/full-cluster-restart/src/test/resources/org/elasticsearch/xpack/restart/throttle-period-watch.json @@ -0,0 +1,27 @@ +{ + "trigger" : { + "schedule": { + "interval": "1s" + } + }, + "condition" : { + "never" : {} + }, + "throttle_period": "100s", + "actions" : { + "index_payload" : { + "throttle_period": "100s", + "transform" : { + "search" : { + "request" : { + "body" : { "size": 1, "query" : { "match_all" : {} }} + } + } + }, + "index" : { + "index" : "bwc_watch_index", + "doc_type" : "bwc_watch_type" + } + } + } +} \ No newline at end of file diff --git a/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/60_skip_shards.yml b/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/60_skip_shards.yml new file mode 100644 index 00000000000..ad27f58567a --- /dev/null +++ b/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/60_skip_shards.yml @@ -0,0 +1,96 @@ +--- +setup: + - skip: + features: headers + + - do: + cluster.health: + wait_for_status: yellow + - do: + xpack.security.put_user: + username: "joe" + body: > + { + "password": "s3krit", + "roles" : [ "x_cluster_role" ] + } + - do: + xpack.security.put_role: + name: "x_cluster_role" + body: > + { + "cluster": ["all"], + "indices": [ + { + "names": ["skip_shards_index", "my_remote_cluster:single_doc_index"], + 
"privileges": ["read"] + } + ] + } +--- +teardown: + - do: + xpack.security.delete_user: + username: "joe" + ignore: 404 + - do: + xpack.security.delete_role: + name: "x_cluster_role" + ignore: 404 +--- +"Test that remote indices are subject to shard skipping": + + - do: + indices.create: + index: skip_shards_index + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + test_type: + properties: + created_at: + type: date + format: "yyyy-MM-dd" + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "skip_shards_index", "_type": "test_type"}}' + - '{"f1": "local_cluster", "sort_field": 0, "created_at" : "2017-01-01"}' + + # check that we skip the remote shard + - do: + headers: { Authorization: "Basic am9lOnMza3JpdA==" } + search: + index: "skip_shards_index,my_remote_cluster:single_doc_index" + pre_filter_shard_size: 1 + body: { "size" : 10, "query" : { "range" : { "created_at" : { "gte" : "2016-02-01", "lt": "2018-02-01"} } } } + + - match: { hits.total: 1 } + - match: { hits.hits.0._index: "skip_shards_index"} + - match: { _shards.total: 2 } + - match: { _shards.successful: 2 } + - match: { _shards.skipped : 1} + - match: { _shards.failed: 0 } + - match: { hits.total: 1 } + + # check that we skip the local shard + - do: + headers: { Authorization: "Basic am9lOnMza3JpdA==" } + search: + index: "skip_shards_index,my_remote_cluster:single_doc_index" + pre_filter_shard_size: 1 + body: { "size" : 10, "query" : { "range" : { "created_at" : { "gte" : "2015-02-01", "lt": "2016-02-01"} } } } + + - match: { hits.total: 1 } + - match: { hits.hits.0._index: "my_remote_cluster:single_doc_index"} + - match: { _shards.total: 2 } + - match: { _shards.successful: 2 } + - match: { _shards.skipped : 1} + - match: { _shards.failed: 0 } + - match: { hits.total: 1 } + diff --git a/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml b/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml index 9aca1eba67e..6fa2b1e31a1 100644 --- a/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml +++ b/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml @@ -22,13 +22,37 @@ setup: "cluster": ["monitor"], "indices": [ { - "names": ["secure_alias", "test_index", "aliased_test_index", "field_caps_index_1", "field_caps_index_3"], + "names": ["single_doc_index", "secure_alias", "test_index", "aliased_test_index", "field_caps_index_1", + "field_caps_index_3"], "privileges": ["read", "read_cross_cluster"] } ] } --- "Index data and search on the remote cluster": + + - do: + indices.create: + index: single_doc_index + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + test_type: + properties: + created_at: + type: date + format: "yyyy-MM-dd" + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "single_doc_index", "_type": "test_type"}}' + - '{"f1": "remote_cluster", "sort_field": 1, "created_at" : "2016-01-01"}' + - do: indices.create: index: field_caps_index_1 diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 55ed25ebdbc..2364d704e76 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -31,7 +31,7 @@ Closure waitWithAuth = { NodeInfo node, AntBuilder ant -> // TODO this sucks having to hardcode number of nodes, but node.config.numNodes isn't necessarily accurate for rolling 
httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health?wait_for_nodes=2&wait_for_status=yellow").openConnection(); httpURLConnection.setRequestProperty("Authorization", "Basic " + - Base64.getEncoder().encodeToString("elastic:changeme".getBytes(StandardCharsets.UTF_8))); + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8))); httpURLConnection.setRequestMethod("GET"); httpURLConnection.setConnectTimeout(1000); httpURLConnection.setReadTimeout(30000); // read needs to wait for nodes! @@ -114,6 +114,7 @@ subprojects { configure(extensions.findByName("${baseName}#oldClusterTestCluster")) { dependsOn copyTestNodeKeystore plugin ':x-pack-elasticsearch:plugin' + setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' distribution = 'zip' bwcVersion = version numBwcNodes = 2 @@ -146,6 +147,7 @@ subprojects { configure(extensions.findByName("${baseName}#mixedClusterTestCluster")) { dependsOn oldClusterTestRunner, "${baseName}#oldClusterTestCluster#node1.stop" plugin ':x-pack-elasticsearch:plugin' + setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' distribution = 'zip' clusterName = 'rolling-upgrade' unicastTransportUri = { seedNode, node, ant -> oldClusterTest.nodes.get(0).transportUri() } @@ -175,6 +177,7 @@ subprojects { configure(extensions.findByName("${baseName}#upgradedClusterTestCluster")) { dependsOn(mixedClusterTestRunner, "${baseName}#oldClusterTestCluster#node0.stop") plugin ':x-pack-elasticsearch:plugin' + setupCommand 'setupTestUser', 'bin/x-pack/users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'superuser' distribution = 'zip' clusterName = 'rolling-upgrade' unicastTransportUri = { seedNode, node, ant -> mixedClusterTest.nodes.get(0).transportUri() } @@ -228,7 +231,6 @@ subprojects { dependsOn = ["v${wireCompatVersions[-1]}#bwcTest"] } } - check.dependsOn(integTest) dependencies { diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index d630e6efe5a..b0ae538c073 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -56,7 +56,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends SecurityClusterClientYa @Override protected Settings restClientSettings() { - String token = "Basic " + Base64.getEncoder().encodeToString(("elastic:changeme").getBytes(StandardCharsets.UTF_8)); + String token = "Basic " + Base64.getEncoder().encodeToString(("test_user:x-pack-test-password").getBytes(StandardCharsets.UTF_8)); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) // we increase the timeout here to 90 seconds to handle long waits for a green diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java index 8da564f3ce7..3c7e2b607aa 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/WatchBackwardsCompatibilityIT.java @@ -5,9 +5,7 @@ */ 
package org.elasticsearch.upgrades; -import org.apache.http.HttpEntity; import org.apache.http.HttpHost; -import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; @@ -18,11 +16,12 @@ import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ObjectPath; import org.elasticsearch.xpack.watcher.condition.AlwaysCondition; -import org.elasticsearch.xpack.watcher.watch.Watch; import org.junit.Before; import java.io.IOException; @@ -35,7 +34,6 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.security.SecurityLifecycleService.SECURITY_TEMPLATE_NAME; import static org.elasticsearch.xpack.watcher.actions.ActionBuilders.loggingAction; import static org.elasticsearch.xpack.watcher.client.WatchSourceBuilders.watchBuilder; @@ -102,6 +100,8 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { }); nodes = buildNodeAndVersions(); + logger.info("Nodes in cluster before test: bwc [{}], new [{}], master [{}]", nodes.getBWCNodes(), nodes.getNewNodes(), + nodes.getMaster()); } @Override @@ -112,7 +112,7 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { @Override protected Settings restClientSettings() { String token = "Basic " + Base64.getEncoder() - .encodeToString(("elastic:changeme").getBytes(StandardCharsets.UTF_8)); + .encodeToString(("test_user:x-pack-test-password").getBytes(StandardCharsets.UTF_8)); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) .build(); @@ -131,9 +131,6 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { }); } - // we have to have finish the upgrade API first to make this test work, so we can call it instead of - // https://github.com/elastic/x-pack-elasticsearch/issues/1303 - @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/pull/1603") public void testWatchCrudApis() throws IOException { assumeFalse("new nodes is empty", nodes.getNewNodes().isEmpty()); @@ -146,23 +143,40 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { StringEntity entity = new StringEntity(bytesReference.utf8ToString(), ContentType.APPLICATION_JSON); - executeAgainstAllNodes(client -> { - fakeUpgradeFrom5x(client); + // execute upgrade if new nodes are in the cluster + executeUpgradeIfClusterHasNewNode(); - assertOK(client.performRequest("PUT", "/_xpack/watcher/watch/my-watch", Collections.emptyMap(), entity)); - assertOK(client.performRequest("GET", "/_xpack/watcher/watch/my-watch")); - assertOK(client.performRequest("POST", "/_xpack/watcher/watch/my-watch/_execute")); - assertOK(client.performRequest("PUT", "/_xpack/watcher/watch/my-watch/_deactivate")); - assertOK(client.performRequest("PUT", "/_xpack/watcher/watch/my-watch/_activate")); + executeAgainstAllNodes(client -> { + Map params = Collections.singletonMap("error_trace", "true"); + assertOK(client.performRequest("PUT", 
"/_xpack/watcher/watch/my-watch", params, entity)); + assertOK(client.performRequest("GET", "/_xpack/watcher/watch/my-watch", params)); + assertOK(client.performRequest("POST", "/_xpack/watcher/watch/my-watch/_execute", params)); + assertOK(client.performRequest("PUT", "/_xpack/watcher/watch/my-watch/_deactivate", params)); + assertOK(client.performRequest("PUT", "/_xpack/watcher/watch/my-watch/_activate", params)); }); } + public void executeUpgradeIfClusterHasNewNode() + throws IOException { + HttpHost[] newHosts = nodes.getNewNodes().stream().map(Node::getPublishAddress).toArray(HttpHost[]::new); + if (newHosts.length > 0) { + try (RestClient client = buildClient(restClientSettings(), newHosts)) { + logger.info("checking that upgrade procedure on the new cluster is required, hosts [{}]", Arrays.asList(newHosts)); + Map params = Collections.singletonMap("error_trace", "true"); + Map response = toMap(client().performRequest("GET", "_xpack/migration/assistance", params)); + String action = ObjectPath.evaluate(response, "indices.\\.watches.action_required"); + logger.info("migration assistance response [{}]", action); + if ("upgrade".equals(action)) { + client.performRequest("POST", "_xpack/migration/upgrade/.watches", params); + } + } + } + } + public void executeAgainstAllNodes(CheckedConsumer consumer) throws IOException { HttpHost[] newHosts = nodes.getNewNodes().stream().map(Node::getPublishAddress).toArray(HttpHost[]::new); HttpHost[] bwcHosts = nodes.getBWCNodes().stream().map(Node::getPublishAddress).toArray(HttpHost[]::new); - - logger.info("# of bwc nodes [{}], number of new nodes [{}]", Arrays.asList(bwcHosts), Arrays.asList(newHosts)); assertTrue("No nodes in cluster, cannot run any tests", newHosts.length > 0 || bwcHosts.length > 0); if (newHosts.length > 0) { @@ -182,22 +196,6 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { assertThat(response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); } - // This is needed for fake the upgrade from 5.x to 6.0, where a new watches template is created, that contains mapping for the status - // field, as _status will be moved to status - // This can be removed once the upgrade API supports everything - private void fakeUpgradeFrom5x(RestClient client) throws IOException { - BytesReference mappingJson = jsonBuilder().startObject().startObject("properties").startObject("status") - .field("type", "object") - .field("enabled", false) - .field("dynamic", true) - .endObject().endObject().endObject() - .bytes(); - HttpEntity data = new ByteArrayEntity(mappingJson.toBytesRef().bytes, ContentType.APPLICATION_JSON); - - Response response = client.performRequest("PUT", "/" + Watch.INDEX + "/_mapping/" + Watch.DOC_TYPE, Collections.emptyMap(), data); - assertOK(response); - } - private Nodes buildNodeAndVersions() throws IOException { Response response = client().performRequest("GET", "_nodes"); ObjectPath objectPath = ObjectPath.createFromResponse(response); @@ -306,7 +304,12 @@ public class WatchBackwardsCompatibilityIT extends ESRestTestCase { "id='" + id + '\'' + ", nodeName='" + nodeName + '\'' + ", version=" + version + + ", address=" + publishAddress + '}'; } } + + static Map toMap(Response response) throws IOException { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false); + } } diff --git a/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java 
b/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java index 3e0fb1755bf..d110a6ca071 100644 --- a/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java +++ b/qa/security-example-extension/src/main/java/org/elasticsearch/example/realm/CustomRealm.java @@ -8,13 +8,13 @@ package org.elasticsearch.example.realm; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xpack.security.authc.IncomingRequest; -import org.elasticsearch.xpack.security.authc.support.CharArrays; -import org.elasticsearch.xpack.security.user.User; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.AuthenticationToken; import org.elasticsearch.xpack.security.authc.Realm; import org.elasticsearch.xpack.security.authc.RealmConfig; +import org.elasticsearch.xpack.security.authc.support.CharArrays; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.security.user.User; public class CustomRealm extends Realm { @@ -49,13 +49,17 @@ public class CustomRealm extends Realm { } @Override - public void authenticate(AuthenticationToken authToken, ActionListener listener, IncomingRequest incomingRequest) { + public void authenticate(AuthenticationToken authToken, ActionListener listener) { UsernamePasswordToken token = (UsernamePasswordToken)authToken; final String actualUser = token.principal(); - if (KNOWN_USER.equals(actualUser) && CharArrays.constantTimeEquals(token.credentials().getChars(), KNOWN_PW.getChars())) { - listener.onResponse(new User(actualUser, ROLES)); + if (KNOWN_USER.equals(actualUser)) { + if (CharArrays.constantTimeEquals(token.credentials().getChars(), KNOWN_PW.getChars())) { + listener.onResponse(AuthenticationResult.success(new User(actualUser, ROLES))); + } else { + listener.onResponse(AuthenticationResult.unsuccessful("Invalid password for user " + actualUser, null)); + } } else { - listener.onResponse(null); + listener.onResponse(AuthenticationResult.notHandled()); } } diff --git a/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java b/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java index 0029706a57b..88470b12fbe 100644 --- a/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java +++ b/qa/security-example-extension/src/test/java/org/elasticsearch/example/realm/CustomRealmTests.java @@ -10,16 +10,14 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.security.authc.IncomingRequest; -import org.elasticsearch.xpack.security.user.User; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.authc.AuthenticationResult; import org.elasticsearch.xpack.security.authc.RealmConfig; import org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.user.User; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; -import static org.mockito.Mockito.mock; 
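Aside, not part of the patch: the realm above keeps comparing passwords with CharArrays.constantTimeEquals so that the check does not return early on the first differing character. Outside of x-pack, the JDK's MessageDigest.isEqual gives the same kind of full-length comparison; a small illustrative sketch (real code would avoid materializing the password as a String):

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

// Constant-time style password check, analogous in intent to the
// CharArrays.constantTimeEquals call in CustomRealm above.
public final class ConstantTimeCheck {

    static boolean passwordsMatch(char[] expected, char[] provided) {
        byte[] a = new String(expected).getBytes(StandardCharsets.UTF_8);
        byte[] b = new String(provided).getBytes(StandardCharsets.UTF_8);
        // For equal-length inputs, MessageDigest.isEqual examines all bytes instead of
        // bailing out at the first mismatch, so the timing does not reveal where they differ.
        return MessageDigest.isEqual(a, b);
    }

    public static void main(String[] args) {
        System.out.println(passwordsMatch("x-pack-test-password".toCharArray(),
                "x-pack-test-password".toCharArray())); // true
        System.out.println(passwordsMatch("x-pack-test-password".toCharArray(),
                "x-pack-test-passw0rd".toCharArray())); // false
    }
}
```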
public class CustomRealmTests extends ESTestCase { public void testAuthenticate() { @@ -27,9 +25,9 @@ public class CustomRealmTests extends ESTestCase { CustomRealm realm = new CustomRealm(new RealmConfig("test", Settings.EMPTY, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings))); SecureString password = CustomRealm.KNOWN_PW.clone(); UsernamePasswordToken token = new UsernamePasswordToken(CustomRealm.KNOWN_USER, password); - PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - realm.authenticate(token, plainActionFuture, mock(IncomingRequest.class)); - User user = plainActionFuture.actionGet(); + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + realm.authenticate(token, plainActionFuture); + User user = plainActionFuture.actionGet().getUser(); assertThat(user, notNullValue()); assertThat(user.roles(), equalTo(CustomRealm.ROLES)); assertThat(user.principal(), equalTo(CustomRealm.KNOWN_USER)); @@ -40,8 +38,9 @@ public class CustomRealmTests extends ESTestCase { CustomRealm realm = new CustomRealm(new RealmConfig("test", Settings.EMPTY, globalSettings, new Environment(globalSettings), new ThreadContext(globalSettings))); SecureString password = CustomRealm.KNOWN_PW.clone(); UsernamePasswordToken token = new UsernamePasswordToken(CustomRealm.KNOWN_USER + "1", password); - PlainActionFuture plainActionFuture = new PlainActionFuture<>(); - realm.authenticate(token, plainActionFuture, mock(IncomingRequest.class)); - assertThat(plainActionFuture.actionGet(), nullValue()); + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + realm.authenticate(token, plainActionFuture); + final AuthenticationResult result = plainActionFuture.actionGet(); + assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE)); } } diff --git a/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml b/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml index 8c3788d5103..30284ab1645 100644 --- a/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml +++ b/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml @@ -93,12 +93,17 @@ setup: } - do: - put_template: + put_script: id: "1" body: > { - "term" : { - "username" : "{{_user.username}}" + "script": { + "lang": "mustache", + "source": { + "term" : { + "username" : "{{_user.username}}" + } + } } } diff --git a/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml b/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml index 68c9bba7432..0c6bd4cbdac 100644 --- a/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml +++ b/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml @@ -93,12 +93,17 @@ setup: } - do: - put_template: + put_script: id: "1" body: > { - "term" : { - "username" : "{{_user.username}}" + "script": { + "lang": "mustache", + "source": { + "term" : { + "username" : "{{_user.username}}" + } + } } } diff --git a/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/30_search_input.yml b/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/30_search_input.yml index 24ceb3fcd3d..28e2788fedb 
100644 --- a/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/30_search_input.yml +++ b/qa/smoke-test-watcher-with-mustache/src/test/resources/rest-api-spec/test/watcher_mustache/30_search_input.yml @@ -102,18 +102,23 @@ setup: "Test search input mustache integration (using request template)": - do: - put_template: + put_script: id: "search-template" body: { - "query" : { - "bool" : { - "must" : [ - { - "term" : { - "value" : "val_{{num}}" - } + "script": { + "lang": "mustache", + "source": { + "query" : { + "bool" : { + "must" : [ + { + "term" : { + "value" : "val_{{num}}" + } + } + ] } - ] + } } } } @@ -157,5 +162,4 @@ setup: # makes sure that the mustache template snippets have been resolved correctly: - match: { "watch_record.result.input.search.request.body.query.bool.must.0.term.value": "val_2" } - match: { "watch_record.result.input.search.request.template.id": "search-template" } - - match: { "watch_record.result.input.search.request.template.lang": "mustache" } - match: { "watch_record.result.input.search.request.template.params.num": 2 } diff --git a/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/test/TribeWithSecurityIT.java b/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/test/TribeWithSecurityIT.java index a948831c1b7..5b7c7c9f40e 100644 --- a/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/test/TribeWithSecurityIT.java +++ b/qa/tribe-tests-with-security/src/test/java/org/elasticsearch/test/TribeWithSecurityIT.java @@ -12,6 +12,7 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.message.BasicHeader; import org.apache.http.nio.entity.NStringEntity; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; @@ -96,24 +97,6 @@ public class TribeWithSecurityIT extends SecurityIntegTestCase { public void addSecurityIndex() throws IOException { client().admin().indices().prepareCreate(INTERNAL_SECURITY_INDEX).get(); cluster2.client().admin().indices().prepareCreate(INTERNAL_SECURITY_INDEX).get(); - - InetSocketAddress[] inetSocketAddresses = cluster2.httpAddresses(); - List hosts = new ArrayList<>(); - for (InetSocketAddress socketAddress : inetSocketAddresses) { - hosts.add(new HttpHost(socketAddress.getAddress(), socketAddress.getPort())); - } - - RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()])); - RestClient client = builder.build(); - SecureString defaultPassword = new SecureString("".toCharArray()); - - String payload = "{\"password\": \"" + SecuritySettingsSource.TEST_PASSWORD + "\"}"; - HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); - BasicHeader authHeader = new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, - UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, defaultPassword)); - String route = "/_xpack/security/user/elastic/_password"; - Response response = getRestClient().performRequest("PUT", route, Collections.emptyMap(), entity, authHeader); - client.close(); } @Override @@ -135,6 +118,7 @@ public class TribeWithSecurityIT extends SecurityIntegTestCase { return new ExternalTestCluster(createTempDir(), externalClusterClientSettings(), transportClientPlugins(), transportAddresses); } + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1996") public void 
testThatTribeCanAuthenticateElasticUser() throws Exception { ClusterHealthResponse response = tribeNode.client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("elastic", SecuritySettingsSource.TEST_PASSWORD_SECURE_STRING))) @@ -142,6 +126,7 @@ public class TribeWithSecurityIT extends SecurityIntegTestCase { assertNoTimeout(response); } + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1996") public void testThatTribeCanAuthenticateElasticUserWithChangedPassword() throws Exception { securityClient(client()).prepareChangePassword("elastic", "password".toCharArray()).get(); @@ -152,6 +137,7 @@ public class TribeWithSecurityIT extends SecurityIntegTestCase { assertNoTimeout(response); } + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1996") public void testThatTribeClustersHaveDifferentPasswords() throws Exception { securityClient().prepareChangePassword("elastic", "password".toCharArray()).get(); securityClient(cluster2.client()).prepareChangePassword("elastic", "password2".toCharArray()).get();